From 3e58f4e6c367ded730dd09255471b1d554a28cd6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 20 Sep 2023 06:13:06 +0000 Subject: [PATCH 001/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b3e67403aaf --- build-tools-internal/version.properties | 2 +- docs/Versions.asciidoc | 4 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 3 files changed, 75 insertions(+), 75 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 151381bfa0cb1..9fff8c63f5f56 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.8.0-snapshot-1f8e08481c2 +lucene = 9.9.0-snapshot-b3e67403aaf bundled_jdk_vendor = openjdk bundled_jdk = 20.0.2+9@6e380f22cbe7469fa75fb448bd903d8e diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 47e9071679cc4..3f44db9928434 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.8.0 -:lucene_version_path: 9_8_0 +:lucene_version: 9.9.0 +:lucene_version_path: 9_9_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9e8c193fa705f..67d8653732d8f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2528,124 +2528,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 81c86035a83d23a90a6f64b200636df35ea5ec8e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 20 Sep 2023 08:30:09 -0400 Subject: [PATCH 002/181] Adds new max_inner_product vector similarity function (#99527) Adds new 
max_inner_product vector similarity function. This differs from dot_product in the following ways: Doesn't require vectors to be normalized Scales the similarity between vectors differently to prevent negative scores --- docs/changelog/99527.yaml | 5 ++ .../mapping/types/dense-vector.asciidoc | 10 ++- .../test/search.vectors/40_knn_search.yml | 85 +++++++++++++++++++ .../vectors/DenseVectorFieldMapper.java | 8 ++ 4 files changed, 107 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/99527.yaml diff --git a/docs/changelog/99527.yaml b/docs/changelog/99527.yaml new file mode 100644 index 0000000000000..19eef621fa500 --- /dev/null +++ b/docs/changelog/99527.yaml @@ -0,0 +1,5 @@ +pr: 99445 +summary: Add new max_inner_product vector similarity function +area: Vector Search +type: enhancement +issues: [] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index fb50ee36644a6..96427a01e61d5 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -159,7 +159,7 @@ distance) between the vectors. The document `_score` is computed as `1 / (1 + l2_norm(query, vector)^2)`. `dot_product`::: -Computes the dot product of two vectors. This option provides an optimized way +Computes the dot product of two unit vectors. This option provides an optimized way to perform cosine similarity. The constraints and computed score are defined by `element_type`. + @@ -181,6 +181,14 @@ original vectors and cannot normalize them in advance. The document `_score` is computed as `(1 + cosine(query, vector)) / 2`. The `cosine` similarity does not allow vectors with zero magnitude, since cosine is not defined in this case. + +`max_inner_product`::: +Computes the maximum inner product of two vectors. This is similar to `dot_product`, +but doesn't require vectors to be normalized. This means that each vector's magnitude +can significantly effect the score. 
The document `_score` is adjusted to prevent negative +values. For `max_inner_product` values `< 0`, the `_score` is +`1 / (1 + -1 * max_inner_product(query, vector))`. For non-negative `max_inner_product` results +the `_score` is calculated `max_inner_product(query, vector) + 1`. ==== NOTE: Although they are conceptually related, the `similarity` parameter is diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index f34aef9b83321..340cd8f8d0f70 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -368,3 +368,88 @@ setup: filter: {"term": {"name": "cow.jpg"}} - length: {hits.hits: 0} +--- +"Knn search with mip": + - skip: + version: ' - 8.10.99' + reason: 'mip similarity added in 8.11' + features: close_to + + - do: + indices.create: + index: mip + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: max_inner_product + + - do: + index: + index: mip + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + + - do: + index: + index: mip + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + + - do: + index: + index: mip + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - do: + indices.refresh: {} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + + - length: {hits.hits: 3} + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 58694.902, error: 0.01}} + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 34702.79, error: 0.01}} + - match: 
{hits.hits.2._id: "2"} + - close_to: {hits.hits.2._score: {value: 33686.29, error: 0.01}} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: { "term": { "name": "moose.jpg" } } + + + + - length: {hits.hits: 1} + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 33686.29, error: 0.01}} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 28f83a167fda3..dc90dc7382780 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -661,6 +661,14 @@ float score(float similarity, ElementType elementType, int dim) { case FLOAT -> (1 + similarity) / 2f; }; } + }, + MAX_INNER_PRODUCT(VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT) { + @Override + float score(float similarity, ElementType elementType, int dim) { + return switch (elementType) { + case BYTE, FLOAT -> similarity < 0 ? 
1 / (1 + -1 * similarity) : similarity + 1; + }; + } }; public final VectorSimilarityFunction function; From 0433159cf1ae7509b839ae51ae1ec21d5174df0b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 21 Sep 2023 06:17:35 +0000 Subject: [PATCH 003/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-f01ff9d1f51 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9fff8c63f5f56..19cb3843a40eb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-b3e67403aaf +lucene = 9.9.0-snapshot-f01ff9d1f51 bundled_jdk_vendor = openjdk bundled_jdk = 20.0.2+9@6e380f22cbe7469fa75fb448bd903d8e diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 67d8653732d8f..e89595774b2e1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2528,124 +2528,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 8e24410f763188976adb9c5f3b0d9b03d32dd355 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 22 Sep 2023 06:15:53 +0000 Subject: [PATCH 004/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-be57460b060 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 19cb3843a40eb..55e98cc1482cf 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-f01ff9d1f51 +lucene = 9.9.0-snapshot-be57460b060 bundled_jdk_vendor = openjdk bundled_jdk = 20.0.2+9@6e380f22cbe7469fa75fb448bd903d8e diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e89595774b2e1..2179faa1052c4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2528,124 +2528,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0b971d2cf02f4c47863a264203f3c2ef84272cd5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Sep 2023 10:21:20 +0200 Subject: [PATCH 005/181] Fix compilation after refactoring of TermStates. 
--- .../index/mapper/extras/SourceConfirmedTextQuery.java | 2 +- .../org/elasticsearch/lucene/queries/BlendedTermQuery.java | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index 9faee0282b12c..3c6b865266e21 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -231,7 +231,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo for (Term term : terms) { TermStates ts = termStates.computeIfAbsent(term, t -> { try { - return TermStates.build(searcher.getTopReaderContext(), t, scoreMode.needsScores()); + return TermStates.build(searcher, t, scoreMode.needsScores()); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java index a49f02acf4c4d..d88e0e0dd9fcf 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java @@ -73,15 +73,14 @@ public Query rewrite(IndexSearcher searcher) throws IOException { if (rewritten != this) { return rewritten; } - IndexReader reader = searcher.getIndexReader(); - IndexReaderContext context = reader.getContext(); TermStates[] ctx = new TermStates[terms.length]; int[] docFreqs = new int[ctx.length]; for (int i = 0; i < terms.length; i++) { - ctx[i] = TermStates.build(context, terms[i], true); + ctx[i] = TermStates.build(searcher, terms[i], true); docFreqs[i] = ctx[i].docFreq(); } + final IndexReader reader = 
searcher.getIndexReader(); final int maxDoc = reader.maxDoc(); blend(ctx, maxDoc, reader); return topLevelQuery(terms, ctx, docFreqs, maxDoc); From 6cf0c30b66511c18e2265277e9c7cc4a8f6a3c3f Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Sep 2023 12:11:05 +0200 Subject: [PATCH 006/181] Refactor changes to IndexVersion. (#99312) This adds a version for the Lucene upgrade and adjusts some tests. --- .../main/java/org/elasticsearch/index/IndexVersion.java | 8 +++++--- .../java/org/elasticsearch/index/IndexVersionTests.java | 6 ++++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index 4afbbc851026f..5df3999a75316 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -117,14 +117,16 @@ private static IndexVersion registerIndexVersion(int id, Version luceneVersion, public static final IndexVersion V_8_9_0 = registerIndexVersion(8_09_00_99, Version.LUCENE_9_7_0, "32f6dbab-cc24-4f5b-87b5-015a848480d9"); public static final IndexVersion V_8_9_1 = registerIndexVersion(8_09_01_99, Version.LUCENE_9_7_0, "955a80ac-f70c-40a5-9399-1d8a1e5d342d"); public static final IndexVersion V_8_10_0 = registerIndexVersion(8_10_00_99, Version.LUCENE_9_7_0, "2e107286-12ad-4c51-9a6f-f8943663b6e7"); - public static final IndexVersion V_8_11_0 = registerIndexVersion(8_11_00_99, Version.LUCENE_9_8_0, "f08382c0-06ab-41f4-a56a-cf5397275627"); + public static final IndexVersion V_8_11_0 = registerIndexVersion(8_11_00_99, Version.LUCENE_9_7_0, "f08382c0-06ab-41f4-a56a-cf5397275627"); /* * READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW INDEX VERSIONS * Detached index versions added below here. 
*/ - public static final IndexVersion V_8_500_000 = registerIndexVersion(8_500_000, Version.LUCENE_9_8_0, "bf656f5e-5808-4eee-bf8a-e2bf6736ff55"); - public static final IndexVersion V_8_500_001 = registerIndexVersion(8_500_001, Version.LUCENE_9_8_0, "45045a5a-fc57-4462-89f6-6bc04cda6015"); + public static final IndexVersion V_8_500_000 = registerIndexVersion(8_500_000, Version.LUCENE_9_7_0, "bf656f5e-5808-4eee-bf8a-e2bf6736ff55"); + public static final IndexVersion V_8_500_001 = registerIndexVersion(8_500_001, Version.LUCENE_9_7_0, "45045a5a-fc57-4462-89f6-6bc04cda6015"); + + public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = registerIndexVersion(8_500_010, Version.LUCENE_9_9_0, "ee5ab2e6-4d8f-11ee-be56-0242ac120002"); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java index 452da5279f4c1..2fd7af9dcdd87 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java @@ -107,8 +107,10 @@ public void testDefinedConstants() throws IllegalAccessException { field.getModifiers() ); - Matcher matcher = historicalVersion.matcher(field.getName()); - if (matcher.matches()) { + Matcher matcher; + if ("UPGRADE_TO_LUCENE_9_9".equals(field.getName())) { + // OK + } else if ((matcher = historicalVersion.matcher(field.getName())).matches()) { // old-style version constant String idString = matcher.group(1) + padNumber(matcher.group(2)) + padNumber(matcher.group(3)) + "99"; assertEquals( From b066509e7ba5ae0d3c18a4d562bd9ba9caced1b8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 23 Sep 2023 06:13:10 +0000 Subject: [PATCH 007/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-d3a3391d225 --- build-tools-internal/version.properties | 2 +- 
gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 3bfd4759b0d61..a7723a5cbce7e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-be57460b060 +lucene = 9.9.0-snapshot-d3a3391d225 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2b28b9a2a17f5..f789e526e826e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2549,124 +2549,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 82df0692857351827cfe676428c3877be33ebde0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 26 Sep 2023 06:18:18 +0000 Subject: [PATCH 008/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-0fb47cd44a6 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a7723a5cbce7e..e20758406ee2f 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-d3a3391d225 +lucene = 9.9.0-snapshot-0fb47cd44a6 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f789e526e826e..641ac9100dd01 100644 --- 
a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2549,124 +2549,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 92deac7cd7b7c7b009cb036dc0c494be0b4e4ef5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 27 Sep 2023 06:15:08 +0000 Subject: [PATCH 009/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-0fb47cd44a6 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 641ac9100dd01..e31c4d1a47429 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2551,122 +2551,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From c56896fefbc3b7016867eaa2fb46c5816b8e1c1b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 27 Sep 2023 14:24:13 +0000 Subject: [PATCH 010/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-350de210c36 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e20758406ee2f..ec90ba3c4721f 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-0fb47cd44a6 +lucene = 9.9.0-snapshot-350de210c36 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 186f1ddf50c8d..040597c95d2a9 
100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3b5131ec96c1a087a580813e2532fecb04be7f1e Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 27 Sep 2023 20:04:48 +0200 Subject: [PATCH 011/181] Add RandomAccessInput#length to SeekTrackingDirectoryWrapper (#99962) Fix compiling error after adding a new method to RandomAccessInput interface. --- .../test/seektracker/SeekTrackingDirectoryWrapper.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java index 9b1991b52e500..9b3d31022c589 100644 --- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java +++ b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java @@ -143,6 +143,11 @@ public RandomAccessInput randomAccessSlice(long offset, long length) throws IOEx IndexInput slice = wrapIndexInput(directory, name, innerSlice); // return default impl return new RandomAccessInput() { + @Override + public long length() { + return slice.length(); + } + @Override public byte readByte(long pos) throws IOException { slice.seek(pos); From dc511398d4860d83f6490ceddf245e8b59623d7a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 28 Sep 2023 06:17:09 +0000 Subject: [PATCH 012/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-350de210c36 --- 
gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 040597c95d2a9..1d054b584ff29 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2581,122 +2581,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 8209aa75fd56faf6594cfcf934855ceabf60c988 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 29 Sep 2023 06:15:31 +0000 Subject: [PATCH 013/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-9ba7f2dc4bc --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ec90ba3c4721f..b84f6f9697844 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-350de210c36 +lucene = 9.9.0-snapshot-9ba7f2dc4bc bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1d054b584ff29..311139031f7b4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0679604fa89521bec7bb7d9f45af5104cc43111c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 30 Sep 2023 06:13:58 +0000 Subject: [PATCH 014/181] [Automated] Update Lucene snapshot to 
9.9.0-snapshot-7c1d1147beb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b84f6f9697844..e3b93ad792a2d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-9ba7f2dc4bc +lucene = 9.9.0-snapshot-7c1d1147beb bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 311139031f7b4..8f5fdddb6a3eb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6b23b7006bd584286810670f8f213da8c26a44e0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 1 Oct 2023 06:15:57 +0000 Subject: [PATCH 015/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-7c1d1147beb --- gradle/verification-metadata.xml | 58 ++++++++++++++++---------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3900d9a6db45a..a3d37e8576bcb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -69,11 +69,11 @@ - - - - - + + + + + @@ -2581,122 +2581,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 9797c08ef0168f1df896ebcd46f268c63c83a130 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 2 Oct 2023 06:16:07 +0000 Subject: [PATCH 
016/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-bab19260197 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e3b93ad792a2d..95ee0b579ad51 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-7c1d1147beb +lucene = 9.9.0-snapshot-bab19260197 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a3d37e8576bcb..782a137a75b86 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 39a3fbdb8da2f6e3bf7b758b759b267abc3940e4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 3 Oct 2023 06:16:14 +0000 Subject: [PATCH 017/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-8c994d1e7c4 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 95ee0b579ad51..4b547324a1c0d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-bab19260197 +lucene = 9.9.0-snapshot-8c994d1e7c4 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff 
--git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 782a137a75b86..1b28da4c9bb8f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a3e1d3e8e50e2f6b8914c0f6388a2aec911d5344 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 5 Oct 2023 06:24:38 +0000 Subject: [PATCH 018/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-cccaa7e7298 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4b547324a1c0d..9e73b184d1f8e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-8c994d1e7c4 +lucene = 9.9.0-snapshot-cccaa7e7298 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1b28da4c9bb8f..1ae54ec3876f1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0b001ea9de49feda0d2dc4d7a810f901ba58d0b0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 6 Oct 2023 06:26:48 +0000 Subject: [PATCH 019/181] 
[Automated] Update Lucene snapshot to 9.9.0-snapshot-b85aeb3a4fa --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 149 ++++++++++++------------ 2 files changed, 73 insertions(+), 78 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6efa3d17d0274..0de3dc693e095 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-cccaa7e7298 +lucene = 9.9.0-snapshot-b85aeb3a4fa bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 56624b1e66354..0cac159905c9a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -36,11 +36,6 @@ - - - - - @@ -2589,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7e1ca81139586b54586691e9e7a921ce5509fc18 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 7 Oct 2023 06:20:32 +0000 Subject: [PATCH 020/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b85aeb3a4fa --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0cac159905c9a..d8bd1f64b29f1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2586,122 +2586,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 9740dd55543132874dbb5f0c2847ba64defb215e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 8 Oct 2023 
06:24:23 +0000 Subject: [PATCH 021/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b85aeb3a4fa --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d8bd1f64b29f1..7c38a4d786b87 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2586,122 +2586,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 54bc0914f4298962347d367f32c040c2f70eb732 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 10 Oct 2023 06:09:46 +0000 Subject: [PATCH 022/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-455d4152d31 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 0de3dc693e095..d9974fd10cec2 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-b85aeb3a4fa +lucene = 9.9.0-snapshot-455d4152d31 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7c38a4d786b87..a3b35899da816 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2584,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e6999042e219508599fc165c1492028ff133ac05 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 11 Oct 
2023 06:09:49 +0000 Subject: [PATCH 023/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-823af4931aa --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d9974fd10cec2..56353b09ca80c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-455d4152d31 +lucene = 9.9.0-snapshot-823af4931aa bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a3b35899da816..5ef5243db8384 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2584,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From dce780a61be2c5608c393b9676a8e0dd4e2fd762 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 12 Oct 2023 06:09:22 +0000 Subject: [PATCH 024/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-aa968f96d6c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 56353b09ca80c..7e57fccd7d63c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-823af4931aa +lucene = 9.9.0-snapshot-aa968f96d6c bundled_jdk_vendor = openjdk bundled_jdk = 
21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5ef5243db8384..9333eadbb5cc2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2584,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0c32da2718f870e9788e468585c84e655c1de17e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 13 Oct 2023 06:09:41 +0000 Subject: [PATCH 025/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-4533dcea4ec --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7e57fccd7d63c..7b0866afbfd12 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-aa968f96d6c +lucene = 9.9.0-snapshot-4533dcea4ec bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2b18172c6c7d5..2fee33c5390cf 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From dae5f7a46bf6ca5cf929fa40e709446fa6b38ae6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 14 
Oct 2023 06:13:34 +0000 Subject: [PATCH 026/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-a5f94b1e81e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7b0866afbfd12..eaedb3e67defb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-4533dcea4ec +lucene = 9.9.0-snapshot-a5f94b1e81e bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2fee33c5390cf..5f1caba9e9dda 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From ebb5d9eb9526171820ec17c6cccff6d70407d193 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 15 Oct 2023 06:09:27 +0000 Subject: [PATCH 027/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-a1bb48aa426 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index eaedb3e67defb..9ea4b7cce9ebb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-a5f94b1e81e +lucene = 9.9.0-snapshot-a1bb48aa426 bundled_jdk_vendor = openjdk bundled_jdk = 
21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5f1caba9e9dda..a0d2b10515821 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 85e8f65c4e30ed239dc09c4935d413567d5fc1b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 16 Oct 2023 06:09:30 +0000 Subject: [PATCH 028/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-28255de5bee --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9ea4b7cce9ebb..ef067340843d4 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-a1bb48aa426 +lucene = 9.9.0-snapshot-28255de5bee bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a0d2b10515821..859a1285426a3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 45a4c1c98c91cf97674bf7d612df96ba14b7c88a Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 16 Oct 2023 
15:31:09 +0100 Subject: [PATCH 029/181] Fix compilation (#100903) --- server/src/main/java/org/elasticsearch/index/IndexVersion.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index e15bb414aca7a..df4fe4b85b3a8 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -128,7 +128,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = registerIndexVersion(8_500_010, Version.LUCENE_9_9_0, "ee5ab2e6-4d8f-11ee-be56-0242ac120002"); + public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = def(8_500_010, Version.LUCENE_9_9_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ From 5f9516868b0019820d52be73d0d55ac8f52b7181 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 17 Oct 2023 06:08:58 +0000 Subject: [PATCH 030/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ba26abcaee9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ef067340843d4..4c0b643da3eb7 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-28255de5bee +lucene = 9.9.0-snapshot-ba26abcaee9 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 859a1285426a3..4de38f2157c9e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From be758b8e7e2d96885e5f70e8207c73e77ec8feb2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 18 Oct 2023 06:09:24 +0000 Subject: [PATCH 031/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-18bb826564b --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4c0b643da3eb7..e51f4c3ae93c4 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ba26abcaee9 +lucene = 9.9.0-snapshot-18bb826564b bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4de38f2157c9e..92347e2e0ed36 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From c1eae02e377d205933dcbc7bc70e59af535d2cc5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 19 Oct 2023 06:10:28 +0000 Subject: [PATCH 032/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-40fbff02f1e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e51f4c3ae93c4..9641d5aec7aa0 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-18bb826564b +lucene = 9.9.0-snapshot-40fbff02f1e bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 92347e2e0ed36..24b0b31540ab7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7d4bcd89b9e1f29470de0c408e5f53ab0a904384 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 20 Oct 2023 06:10:09 +0000 Subject: [PATCH 033/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-6fc3483e4fa --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9641d5aec7aa0..01cea3178f44c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-40fbff02f1e +lucene = 9.9.0-snapshot-6fc3483e4fa bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 24b0b31540ab7..ca0cf4849a4c5 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From bb83b44e7028e941d1343c75aa5a8e320b689f71 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 21 Oct 2023 06:09:33 +0000 Subject: [PATCH 034/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-07a76555d9e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 01cea3178f44c..6b0c981726d36 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-6fc3483e4fa +lucene = 9.9.0-snapshot-07a76555d9e bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index ca0cf4849a4c5..05459c59da2b9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6b8723af1f26a08bb93323623f8a40445371363f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 22 Oct 2023 06:09:31 +0000 Subject: [PATCH 035/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-3292aca1f45 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6b0c981726d36..51cb9246b7d3a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-07a76555d9e +lucene = 9.9.0-snapshot-3292aca1f45 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 05459c59da2b9..548c66a504dd4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d011f8b59227de3e0a6693c628880245a028ba9e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 23 Oct 2023 06:09:13 +0000 Subject: [PATCH 036/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ad0f00a6cb2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 51cb9246b7d3a..d7b23f6fe32b5 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-3292aca1f45 +lucene = 9.9.0-snapshot-ad0f00a6cb2 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 548c66a504dd4..b8c77dc140ef2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e43dfadfb73161ed6b843a8e9a67de9d7ce0c065 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 25 Oct 2023 06:08:39 +0000 Subject: [PATCH 037/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-1cb1a14cc84 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 23c7f5a2dbf75..e9b28221c279a 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ad0f00a6cb2 +lucene = 9.9.0-snapshot-1cb1a14cc84 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index bf61379907588..cffe5644489e2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 16ac279abad98e216183dc01d72aa237c3854fc6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 26 Oct 2023 06:08:31 +0000 Subject: [PATCH 038/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-170f594daea --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e9b28221c279a..65a590d0db701 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-1cb1a14cc84 +lucene = 9.9.0-snapshot-170f594daea bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cffe5644489e2..61a112ce064c4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 9d51f8b4629e06d7af8571e4ff7504e96894c803 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 11:37:28 +0200 Subject: [PATCH 039/181] Update references to Lucene95Codec This is needed following https://github.com/apache/lucene/pull/12685 and https://github.com/apache/lucene/pull/12582 --- .../elasticsearch/common/lucene/Lucene.java | 2 +- .../index/codec/CodecService.java | 10 ++++---- .../index/codec/PerFieldMapperCodec.java | 4 ++-- .../vectors/DenseVectorFieldMapper.java | 6 +++-- .../IndexDiskUsageAnalyzerTests.java | 24 +++++++++---------- .../elasticsearch/index/codec/CodecTests.java | 12 +++++----- .../index/codec/PerFieldMapperCodecTests.java | 6 ++--- .../engine/CompletionStatsCacheTests.java | 4 ++-- .../vectors/DenseVectorFieldMapperTests.java | 7 +++--- .../index/mapper/MapperServiceTestCase.java | 4 ++-- .../sourceonly/SourceOnlySnapshot.java | 2 ++ 11 files changed, 43 insertions(+), 38 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index a53df0087b251..31a4ca97aad6a 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -87,7 +87,7 @@ import java.util.Objects; public class Lucene { - public static final String LATEST_CODEC = "Lucene95"; + public static final String LATEST_CODEC = "Lucene99"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index 990d44f5baefc..d4771ba74e0fb 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MapperService; @@ -35,11 +35,11 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene95Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene95Codec(Lucene95Codec.Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene99Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Lucene99Codec.Mode.BEST_COMPRESSION)); } else { - codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, bigArrays)); - codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); + codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays)); + codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); } codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); for (String codec : Codec.availableCodecs()) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index df1aca3dc7b53..b406262fac3dc 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import 
org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexMode; @@ -37,7 +37,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public class PerFieldMapperCodec extends Lucene95Codec { +public class PerFieldMapperCodec extends Lucene99Codec { private final MapperService mapperService; private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 6aaea1dd32285..c6098b1884a73 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -11,7 +11,8 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.KnnVectorsReader; import org.apache.lucene.codecs.KnnVectorsWriter; -import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -1086,7 +1087,8 @@ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultForm format = defaultFormat; } else { HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene95HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction); + 
format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction, + new Lucene99ScalarQuantizedVectorsFormat()); } // It's legal to reuse the same format name as this is the same on-disk format. return new KnnVectorsFormat(format.getName()) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index fec7a86bd3e59..57dbb1e73f7c5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -13,8 +13,8 @@ import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene90.Lucene90PostingsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; -import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; @@ -263,7 +263,7 @@ public void testKnnVectors() throws Exception { logger.info("--> stats {}", stats); long dataBytes = (long) numDocs * dimension * Float.BYTES; // size of flat vector data - long indexBytesEstimate = (long) numDocs * (Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN / 4); // rough size of HNSW graph + long indexBytesEstimate = (long) numDocs * (Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN / 4); // rough size of HNSW graph assertThat("numDocs=" + numDocs + ";dimension=" + dimension, stats.total().getKnnVectorsBytes(), greaterThan(dataBytes)); long connectionOverhead = 
stats.total().getKnnVectorsBytes() - dataBytes; assertThat("numDocs=" + numDocs, connectionOverhead, greaterThan(indexBytesEstimate)); @@ -326,7 +326,7 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene95Codec(Lucene95Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene99Codec(Lucene99Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { @@ -413,25 +413,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene95Codec.Mode mode() { - return Lucene95Codec.Mode.BEST_SPEED; + Lucene99Codec.Mode mode() { + return Lucene99Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene95Codec.Mode mode() { - return Lucene95Codec.Mode.BEST_COMPRESSION; + Lucene99Codec.Mode mode() { + return Lucene99Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene95Codec.Mode mode(); + abstract Lucene99Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene95Codec(codecMode.mode())); + .setCodec(new Lucene99Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -639,7 +639,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene95Codec(mode.mode()) { + .setCodec(new Lucene99Codec(mode.mode()) { 
@Override public PostingsFormat getPostingsFormatForField(String field) { return new Lucene90PostingsFormat(); @@ -652,7 +652,7 @@ public DocValuesFormat getDocValuesFormatForField(String field) { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - return new Lucene95HnswVectorsFormat(); + return new Lucene99HnswVectorsFormat(); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index b7a5b665ce58f..625c536a1c0d5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -44,21 +44,21 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene95Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + 
assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene95Codec.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(null); iwc.setCodec(actual); @@ -70,7 +70,7 @@ private void assertStoredFieldsCompressionEquals(Lucene95Codec.Mode expected, Co SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene95Codec.Mode.valueOf(v)); + assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); ir.close(); dir.close(); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index adb6ef77f2873..e2a2c72d3eae3 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.index.codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -168,7 +168,7 @@ private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeri """; mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); } - return new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new 
PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); } public void testUseES87TSDBEncodingSettingDisabled() throws IOException { @@ -207,7 +207,7 @@ private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean tim settings.put(IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING.getKey(), enableES87TSDBCodec); MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings.build(), "test"); mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - return new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 2a72b1fe40ec6..96c38efed5b53 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -44,7 +44,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); final PostingsFormat postingsFormat = new Completion90PostingsFormat(); - indexWriterConfig.setCodec(new Lucene95Codec() { + indexWriterConfig.setCodec(new Lucene99Codec() { @Override public 
PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index b10d756a6e458..d61960cfc0f51 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -53,8 +53,8 @@ import java.util.List; import java.util.Set; -import static org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat.DEFAULT_BEAM_WIDTH; -import static org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -973,10 +973,11 @@ public void testKnnVectorsFormat() throws IOException { Codec codec = codecService.codec("default"); assertThat(codec, instanceOf(PerFieldMapperCodec.class)); KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); - String expectedString = "Lucene95HnswVectorsFormat(name=Lucene95HnswVectorsFormat, maxConn=" + String expectedString = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, maxConn=" + m + ", beamWidth=" + efConstruction + + ", quantizer=Lucene99ScalarQuantizedVectorsFormat(name=Lucene99ScalarQuantizedVectorsFormat, quantile=null)" + ")"; assertEquals(expectedString, knnVectorsFormat.toString()); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 168ab8663a153..a0c6d34fc1f6a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -10,7 +10,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; @@ -245,7 +245,7 @@ protected static void withLuceneIndex( CheckedConsumer test ) throws IOException { IndexWriterConfig iwc = new IndexWriterConfig(IndexShard.buildIndexAnalyzer(mapperService)).setCodec( - new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) + new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) ); try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc)) { builder.accept(iw); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 50485ecc21d9a..4a6f6951ec4b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Sort; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import 
org.apache.lucene.store.FilterDirectory; @@ -234,6 +235,7 @@ private SegmentCommitInfo syncSegment( si.name, si.maxDoc(), false, + si.getHasBlocks(), si.getCodec(), si.getDiagnostics(), si.getId(), From 6fd0776b43f416c2d71c77be537c24292dbe00a8 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 11:42:40 +0200 Subject: [PATCH 040/181] Fix spotless issues --- .../index/mapper/vectors/DenseVectorFieldMapper.java | 7 +++++-- .../snapshots/sourceonly/SourceOnlySnapshot.java | 1 - 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index c6098b1884a73..5e89a25fe2eb2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -1087,8 +1087,11 @@ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultForm format = defaultFormat; } else { HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction, - new Lucene99ScalarQuantizedVectorsFormat()); + format = new Lucene99HnswVectorsFormat( + hnswIndexOptions.m, + hnswIndexOptions.efConstruction, + new Lucene99ScalarQuantizedVectorsFormat() + ); } // It's legal to reuse the same format name as this is the same on-disk format. 
return new KnnVectorsFormat(format.getName()) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 4a6f6951ec4b2..c332694d93975 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Sort; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; From 156063945632b591d99f370e647ce414f907c5d5 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 12:23:37 +0200 Subject: [PATCH 041/181] Address compile error in BWCCodec --- .../java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 5d834e0303a37..714f8be73c135 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -129,6 +129,7 @@ public static SegmentInfo wrap(SegmentInfo segmentInfo) { org.apache.lucene.util.Version.LATEST, segmentInfo.name, segmentInfo.maxDoc(), + segmentInfo.getHasBlocks(), segmentInfo.getUseCompoundFile(), codec, segmentInfo.getDiagnostics(), From 2d061460ab272108a50b1fcb4a0a90238c937f5d Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 15:45:07 +0200 Subject: [PATCH 042/181] 
Fix compile errors on Lucene62SegmentInfoFormat and Lucene50SegmentInfoFormat These are needed after https://github.com/apache/lucene/pull/12685 --- .../lucene50/Lucene50SegmentInfoFormat.java | 15 ++++++++++++++- .../lucene62/Lucene62SegmentInfoFormat.java | 1 + 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java index cf4437a230c0d..a260722ee3501 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java @@ -70,7 +70,20 @@ public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOConte final Set files = input.readSetOfStrings(); final Map attributes = input.readMapOfStrings(); - si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null); + si = new SegmentInfo( + dir, + version, + null, + segment, + docCount, + isCompoundFile, + false, + null, + diagnostics, + segmentID, + attributes, + null + ); si.setFiles(files); } catch (Throwable exception) { priorE = exception; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java index b700c39591819..5416b1a9fbc5a 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java +++ 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java @@ -210,6 +210,7 @@ public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOConte segment, docCount, isCompoundFile, + false, null, diagnostics, segmentID, From 8cf48efe1256e9eb27245d2d1514063c13d427f3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 27 Oct 2023 06:09:07 +0000 Subject: [PATCH 043/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-2bb54320c33 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 65a590d0db701..a63cc5ff15927 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-170f594daea +lucene = 9.9.0-snapshot-2bb54320c33 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 61a112ce064c4..5da643e984688 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 5039406b9cfc730f3755b746a9c7974c9a4e18bf Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 27 Oct 2023 14:52:31 +0200 Subject: [PATCH 044/181] Fix arguments order in BWCCodec#wrap --- .../org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 714f8be73c135..df6fded49e6bb 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -129,8 +129,8 @@ public static SegmentInfo wrap(SegmentInfo segmentInfo) { org.apache.lucene.util.Version.LATEST, segmentInfo.name, segmentInfo.maxDoc(), - segmentInfo.getHasBlocks(), segmentInfo.getUseCompoundFile(), + segmentInfo.getHasBlocks(), codec, segmentInfo.getDiagnostics(), segmentInfo.getId(), From 21de5fe18f222321c616b5c56a19d8e6706319b5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 28 Oct 2023 06:08:58 +0000 Subject: [PATCH 045/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-063cfa7a85c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a63cc5ff15927..65aa0da7db3ea 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-2bb54320c33 +lucene = 9.9.0-snapshot-063cfa7a85c bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5da643e984688..45e4a03680939 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6b01689068316682670ff04db797cb243fc9a4a9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 29 Oct 2023 06:09:16 +0000 Subject: [PATCH 046/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-5fe48424a25 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 65aa0da7db3ea..1154110164e6e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-063cfa7a85c +lucene = 9.9.0-snapshot-5fe48424a25 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 45e4a03680939..4c7d9f7fbc717 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0bd8b91a211105f9521ea63b422fd8071a90bf32 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 31 Oct 2023 06:09:05 +0000 Subject: [PATCH 047/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-5b26498ec72 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1154110164e6e..1fdb01602227d 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-5fe48424a25 +lucene = 9.9.0-snapshot-5b26498ec72 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4c7d9f7fbc717..754fcabf059a7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e9776b20349800a245595628fbc831ba685f32d5 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 31 Oct 2023 11:44:42 +0100 Subject: [PATCH 048/181] Switch ContextIndexSearcher to use Lucene's TaskExecutor (#101537) We have contributed back to Lucene the changes we had made around running concurrent tasks. These include waiting for all tasks to finish when an exception is thrown, as well as not starting tasks when one of the previously run tasks throws an exception. The execution of concurrent tasks is now generalized within Lucene and exposed through a TaskExecutor that can be retrieved from the IndexSearcher and used to run tasks. We still have customizations that require us to override some of the search method, but with this change we rely on standard Lucene's behaviour for running concurrent tasks. 
--- .../search/internal/ContextIndexSearcher.java | 116 +----- .../internal/ContextIndexSearcherTests.java | 334 ------------------ 2 files changed, 9 insertions(+), 441 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 3c69db98c7588..3eac5b5378bdd 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -8,8 +8,6 @@ package org.elasticsearch.search.internal; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -36,9 +34,7 @@ import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.Bits; import org.apache.lucene.util.SparseFixedBitSet; -import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.core.Releasable; import org.elasticsearch.lucene.util.CombinedBitSet; import org.elasticsearch.search.dfs.AggregatedDfs; @@ -58,21 +54,14 @@ import java.util.Objects; import java.util.PriorityQueue; import java.util.Set; -import java.util.concurrent.CancellationException; -import java.util.concurrent.ExecutionException; +import java.util.concurrent.Callable; import java.util.concurrent.Executor; -import java.util.concurrent.Future; -import java.util.concurrent.FutureTask; -import java.util.concurrent.RunnableFuture; -import java.util.concurrent.atomic.AtomicInteger; /** * Context-aware extension of {@link IndexSearcher}. 
*/ public class ContextIndexSearcher extends IndexSearcher implements Releasable { - private static final Logger logger = LogManager.getLogger(ContextIndexSearcher.class); - /** * The interval at which we check for search cancellation when we cannot use * a {@link CancellableBulkScorer}. See {@link #intersectScorerAndBitSet}. @@ -143,7 +132,6 @@ public ContextIndexSearcher( int maximumNumberOfSlices, int minimumDocsPerSlice ) throws IOException { - // we need to pass the executor up so it can potentially be used by query rewrite, which does not rely on slicing super(wrapWithExitableDirectoryReader ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader, executor); setSimilarity(similarity); setQueryCache(queryCache); @@ -324,22 +312,12 @@ public T search(Query query, CollectorManager col /** * Similar to the lucene implementation, with the following changes made: - * 1) it will wait for all threads to finish before returning when an exception is thrown. In that case, subsequent exceptions will be - * ignored and the first exception is re-thrown after all tasks are completed. - * 2) Tasks are cancelled on exception, as well as on timeout, to prevent needless computation - * 3) collection is unconditionally offloaded to the executor when set, even when there is a single slice or the request does not - * support concurrent collection. The executor is not set only when concurrent search has been explicitly disabled at the cluster level. - * 4) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads + * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene * does not allow consuming them from a different thread. 
- * 5) handles the ES TimeExceededException + * 2) handles the ES TimeExceededException * */ private T search(Weight weight, CollectorManager collectorManager, C firstCollector) throws IOException { - // the executor will be null only when concurrency is disabled at the cluster level - if (getExecutor() == null) { - search(leafContexts, weight, firstCollector); - return collectorManager.reduce(Collections.singletonList(firstCollector)); - } LeafSlice[] leafSlices = getSlices(); if (leafSlices.length == 0) { assert leafContexts.isEmpty(); @@ -356,92 +334,16 @@ private T search(Weight weight, CollectorManager throw new IllegalStateException("CollectorManager does not always produce collectors with the same score mode"); } } - final List> listTasks = new ArrayList<>(); + final List> listTasks = new ArrayList<>(); for (int i = 0; i < leafSlices.length; ++i) { final LeafReaderContext[] leaves = leafSlices[i].leaves; final C collector = collectors.get(i); - AtomicInteger state = new AtomicInteger(0); - RunnableFuture task = new FutureTask<>(() -> { - if (state.compareAndSet(0, 1)) { - // A slice throws exception or times out: cancel all the tasks, to prevent slices that haven't started yet from - // starting and performing needless computation. - // TODO we will also want to cancel tasks that have already started, reusing the timeout mechanism - try { - search(Arrays.asList(leaves), weight, collector); - if (timeExceeded) { - for (Future future : listTasks) { - FutureUtils.cancel(future); - } - } - } catch (Exception e) { - for (Future future : listTasks) { - FutureUtils.cancel(future); - } - throw e; - } - return collector; - } - throw new CancellationException(); - }) { - @Override - public boolean cancel(boolean mayInterruptIfRunning) { - /* - Future#get (called down below after submitting all tasks) throws CancellationException for a cancelled task while - it is still running. It's important to make sure that search does not leave any tasks behind when it returns. 
- Overriding cancel ensures that tasks that are already started are left alone once cancelled, so Future#get will - wait for them to finish instead of throwing CancellationException. - Tasks that are cancelled before they are started won't start (same behaviour as the original implementation). - */ - return state.compareAndSet(0, -1); - } - - @Override - public boolean isCancelled() { - return state.get() == -1; - } - }; - listTasks.add(task); - } - logger.trace("Collecting using " + listTasks.size() + " tasks."); - - for (Runnable task : listTasks) { - getExecutor().execute(task); - } - RuntimeException exception = null; - final List collectedCollectors = new ArrayList<>(); - boolean cancellation = false; - for (Future future : listTasks) { - try { - collectedCollectors.add(future.get()); - } catch (InterruptedException e) { - if (exception == null) { - exception = new ThreadInterruptedException(e); - } else { - // we ignore further exceptions - } - } catch (ExecutionException e) { - if (exception == null) { - if (e.getCause() instanceof CancellationException) { - // thrown by the manual cancellation implemented above - we cancel on exception and we will throw the root cause - cancellation = true; - } else { - if (e.getCause() instanceof RuntimeException runtimeException) { - exception = runtimeException; - } else if (e.getCause() instanceof IOException ioException) { - throw ioException; - } else { - exception = new RuntimeException(e.getCause()); - } - } - } else { - // we ignore further exceptions - } - } - } - assert cancellation == false || exception != null || timeExceeded : "cancellation without an exception or timeout?"; - if (exception != null) { - throw exception; + listTasks.add(() -> { + search(Arrays.asList(leaves), weight, collector); + return collector; + }); } + List collectedCollectors = getTaskExecutor().invokeAll(listTasks); return collectorManager.reduce(collectedCollectors); } } diff --git 
a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 9e6b6330d2f23..a4e52af5f43c2 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -45,7 +45,6 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; @@ -58,13 +57,10 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.SparseFixedBitSet; -import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; @@ -81,17 +77,11 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Set; -import java.util.concurrent.CopyOnWriteArraySet; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executor; import java.util.concurrent.Executors; -import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.atomic.AtomicInteger; import 
static org.elasticsearch.search.internal.ContextIndexSearcher.intersectScorerAndBitSet; import static org.elasticsearch.search.internal.ExitableDirectoryReader.ExitableLeafReader; @@ -525,330 +515,6 @@ public boolean isCacheable(LeafReaderContext ctx) { } } - /** - * Simulate one or more exceptions being thrown while collecting, through a custom query that throws IOException in its Weight#scorer. - * Verify that the slices that had to wait because there were no available threads in the pool are not started following the exception, - * which triggers a cancellation of all the tasks that are part of the running search. - * Simulate having N threads busy doing other work (e.g. other searches) otherwise all slices can be executed directly, given that - * the number of slices is dependent on the max pool size. - */ - public void testCancelSliceTasksOnException() throws Exception { - try (Directory dir = newDirectory()) { - indexDocs(dir); - int numThreads = randomIntBetween(4, 6); - int numBusyThreads = randomIntBetween(0, 3); - int numAvailableThreads = numThreads - numBusyThreads; - ThreadPoolExecutor executor = EsExecutors.newFixed( - ContextIndexSearcherTests.class.getName(), - numThreads, - -1, - EsExecutors.daemonThreadFactory(""), - new ThreadContext(Settings.EMPTY), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - ExecutorTestWrapper executorTestWrapper = new ExecutorTestWrapper(executor, numBusyThreads); - try (DirectoryReader directoryReader = DirectoryReader.open(dir)) { - Set throwingLeaves = new HashSet<>(); - Set scoredLeaves = new CopyOnWriteArraySet<>(); - final int[] newCollectorsCalls; - final boolean[] reduceCalled; - LeafSlice[] leafSlices; - try ( - ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher( - directoryReader, - IndexSearcher.getDefaultSimilarity(), - IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), - true, - executorTestWrapper, - executor.getMaximumPoolSize(), - 1 - ) - ) { - 
leafSlices = contextIndexSearcher.getSlices(); - int numThrowingLeafSlices = randomIntBetween(1, 3); - for (int i = 0; i < numThrowingLeafSlices; i++) { - LeafSlice throwingLeafSlice = leafSlices[randomIntBetween(0, Math.min(leafSlices.length, numAvailableThreads) - 1)]; - throwingLeaves.add(randomFrom(throwingLeafSlice.leaves)); - } - Query query = new TestQuery() { - @Override - public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { - return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - if (throwingLeaves.contains(context)) { - // a random segment of some random slices throws exception. Other slices may or may not have started - throw new IOException(); - } - scoredLeaves.add(context); - return new ConstantScoreScorer( - this, - boost, - ScoreMode.COMPLETE, - DocIdSetIterator.all(context.reader().maxDoc()) - ); - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - }; - newCollectorsCalls = new int[] { 0 }; - reduceCalled = new boolean[] { false }; - CollectorManager collectorManager = new CollectorManager<>() { - @Override - public Collector newCollector() { - newCollectorsCalls[0]++; - return new SimpleCollector() { - @Override - public void collect(int doc) { - - } - - @Override - public ScoreMode scoreMode() { - return ScoreMode.COMPLETE; - } - }; - } - - @Override - public Integer reduce(Collection collectors) { - reduceCalled[0] = true; - return null; - } - }; - expectThrows(IOException.class, () -> contextIndexSearcher.search(query, collectorManager)); - assertBusy(() -> { - // active count is approximate, wait until it converges to the expected number - if (executor.getActiveCount() > numBusyThreads) { - throw new AssertionError("no search tasks should be left running"); - } - }); - } - // as many tasks as slices have been created - assertEquals(leafSlices.length, newCollectorsCalls[0]); - // 
unexpected exception thrown, reduce is not called, there are no results to return - assertFalse(reduceCalled[0]); - Set expectedScoredLeaves = new HashSet<>(); - // the first N slices, where N is the number of available permits, will run straight-away, the others will be cancelled - for (int i = 0; i < leafSlices.length; i++) { - if (i == numAvailableThreads) { - break; - } - LeafSlice leafSlice = leafSlices[i]; - for (LeafReaderContext context : leafSlice.leaves) { - // collect the segments that we expect to score in each slice, and stop at those that throw - if (throwingLeaves.contains(context)) { - break; - } - expectedScoredLeaves.add(context); - } - } - // The slice that threw exception is not counted. The others that could be executed directly are, but they may have been - // cancelled before they could even start, hence we are going to score at most the segments that the slices that can be - // executed straight-away (before reaching the max pool size) are made of. We can't guarantee that we score all of them. - // We do want to guarantee that the remaining slices won't even start and none of their leaves are scored. - assertTrue(expectedScoredLeaves.containsAll(scoredLeaves)); - } finally { - executorTestWrapper.stopBusyThreads(); - terminate(executor); - } - } - } - - /** - * Simulate one or more timeout being thrown while collecting, through a custom query that times out in its Weight#scorer. - * Verify that the slices that had to wait because there were no available threads in the pool are not started following the timeout, - * which triggers a cancellation of all the tasks that are part of the running search. - * Simulate having N threads busy doing other work (e.g. other searches) otherwise all slices can be executed directly, given that - * the number of slices is dependent on the max pool size. 
- */ - public void testCancelSliceTasksOnTimeout() throws Exception { - try (Directory dir = newDirectory()) { - indexDocs(dir); - int numThreads = randomIntBetween(4, 6); - int numBusyThreads = randomIntBetween(0, 3); - int numAvailableThreads = numThreads - numBusyThreads; - ThreadPoolExecutor executor = EsExecutors.newFixed( - ContextIndexSearcherTests.class.getName(), - numThreads, - -1, - EsExecutors.daemonThreadFactory(""), - new ThreadContext(Settings.EMPTY), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - ExecutorTestWrapper executorTestWrapper = new ExecutorTestWrapper(executor, numBusyThreads); - try (DirectoryReader directoryReader = DirectoryReader.open(dir)) { - Set throwingLeaves = new HashSet<>(); - Set scoredLeaves = new CopyOnWriteArraySet<>(); - final int[] newCollectorsCalls; - final boolean[] reduceCalled; - LeafSlice[] leafSlices; - try ( - ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher( - directoryReader, - IndexSearcher.getDefaultSimilarity(), - IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), - true, - executorTestWrapper, - executor.getMaximumPoolSize(), - 1 - ) - ) { - leafSlices = contextIndexSearcher.getSlices(); - int numThrowingLeafSlices = randomIntBetween(1, 3); - for (int i = 0; i < numThrowingLeafSlices; i++) { - LeafSlice throwingLeafSlice = leafSlices[randomIntBetween(0, Math.min(leafSlices.length, numAvailableThreads) - 1)]; - throwingLeaves.add(randomFrom(throwingLeafSlice.leaves)); - } - Query query = new TestQuery() { - @Override - public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { - return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) { - if (throwingLeaves.contains(context)) { - // a random segment of some random slices throws exception. Other slices may or may not have - // started. 
- contextIndexSearcher.throwTimeExceededException(); - } - scoredLeaves.add(context); - return new ConstantScoreScorer( - this, - boost, - ScoreMode.COMPLETE, - DocIdSetIterator.all(context.reader().maxDoc()) - ); - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - }; - newCollectorsCalls = new int[] { 0 }; - reduceCalled = new boolean[] { false }; - CollectorManager collectorManager = new CollectorManager<>() { - @Override - public Collector newCollector() { - newCollectorsCalls[0]++; - return new SimpleCollector() { - @Override - public void collect(int doc) { - - } - - @Override - public ScoreMode scoreMode() { - return ScoreMode.COMPLETE; - } - }; - } - - @Override - public Integer reduce(Collection collectors) { - reduceCalled[0] = true; - return null; - } - }; - contextIndexSearcher.search(query, collectorManager); - assertBusy(() -> { - // active count is approximate, wait until it converges to the expected number - if (executor.getActiveCount() > numBusyThreads) { - throw new AssertionError("no search tasks should be left running"); - } - }); - assertTrue(contextIndexSearcher.timeExceeded()); - } - // as many tasks as slices have been created - assertEquals(leafSlices.length, newCollectorsCalls[0]); - assertTrue(reduceCalled[0]); - Set expectedScoredLeaves = new HashSet<>(); - // the first N slices, where N is the number of available permits, will run straight-away, the others will be cancelled - for (int i = 0; i < leafSlices.length; i++) { - if (i == numAvailableThreads) { - break; - } - LeafSlice leafSlice = leafSlices[i]; - for (LeafReaderContext context : leafSlice.leaves) { - // collect the segments that we expect to score in each slice, and stop at those that throw - if (throwingLeaves.contains(context)) { - break; - } - expectedScoredLeaves.add(context); - } - } - // The slice that timed out is not counted. 
The others that could be executed directly are, but they may have been - // cancelled before they could even start, hence we are going to score at most the segments that the slices that can be - // executed straight-away (before reaching the max pool size) are made of. We can't guarantee that we score all of them. - // We do want to guarantee that the remaining slices won't even start and none of their leaves are scored. - assertTrue(expectedScoredLeaves.containsAll(scoredLeaves)); - } finally { - executorTestWrapper.stopBusyThreads(); - terminate(executor); - } - } - } - - private static class ExecutorTestWrapper implements Executor { - private final ThreadPoolExecutor executor; - private final AtomicInteger startedTasks = new AtomicInteger(0); - private final CountDownLatch busyThreadsLatch = new CountDownLatch(1); - - ExecutorTestWrapper(ThreadPoolExecutor executor, int numBusyThreads) { - this.executor = executor; - // keep some of the threads occupied to simulate the situation where the slices tasks get queued up. - // This is a realistic scenario that does not get tested otherwise by executing a single concurrent search, given that the - // number of slices is capped by max pool size. - for (int i = 0; i < numBusyThreads; i++) { - execute(() -> { - try { - busyThreadsLatch.await(); - } catch (InterruptedException e) { - throw new ThreadInterruptedException(e); - } - }); - } - } - - void stopBusyThreads() { - busyThreadsLatch.countDown(); - } - - @Override - public void execute(Runnable command) { - int started = startedTasks.incrementAndGet(); - if (started > executor.getMaximumPoolSize()) { - try { - /* - There could be tasks that complete quickly before the exception is handled, which leaves room for new tasks that are - about to get cancelled to start before their cancellation becomes effective. 
We can accept that cancellation may or may - not be effective for the slices that belong to the first batch of tasks until all threads are busy and adjust the - test expectations accordingly, but for the subsequent slices, we want to assert that they are cancelled and never - executed. The only way to guarantee that is waiting for cancellation to kick in. - */ - assertBusy(() -> { - Future future = (Future) command; - if (future.isCancelled() == false) { - throw new AssertionError("task should be cancelled"); - } - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - executor.execute(command); - } - } - private static class TestQuery extends Query { @Override public String toString(String field) { From 58cf676d2038e07e00d2a73388b9eaf411b15529 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 1 Nov 2023 06:09:36 +0000 Subject: [PATCH 049/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-44479b3b48b --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1fdb01602227d..ce36e431fb62c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-5b26498ec72 +lucene = 9.9.0-snapshot-44479b3b48b bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 754fcabf059a7..90ae97b0ec1de 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + 
+ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0a1d15f1bcbb29de742d5d68afe176bab0102eb2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 2 Nov 2023 06:10:04 +0000 Subject: [PATCH 050/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-83727a88e62 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ce36e431fb62c..e99c5f47c7a47 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-44479b3b48b +lucene = 9.9.0-snapshot-83727a88e62 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 90ae97b0ec1de..903f354776124 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From b13d5a4662cc8e314c9d1cb006d4abb38fd4b851 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 3 Nov 2023 06:09:24 +0000 Subject: [PATCH 051/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ab9cbe5aa00 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e99c5f47c7a47..dfb403ac75ee3 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-83727a88e62 +lucene = 9.9.0-snapshot-ab9cbe5aa00 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 903f354776124..a40e5b128f12d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7256febbf48ec94cb40bb107f147dcbaf4a51b4f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 4 Nov 2023 06:09:03 +0000 Subject: [PATCH 052/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-71b3e4c97fb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index dfb403ac75ee3..46a7ef02c5d99 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ab9cbe5aa00 +lucene = 9.9.0-snapshot-71b3e4c97fb bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a40e5b128f12d..607721884ec1b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 34afe08991663bdb0146105f37712ba855946f4a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 6 Nov 2023 16:58:00 +0000 Subject: [PATCH 053/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-6684da1908a --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 46a7ef02c5d99..b3e85b93f13dd 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-71b3e4c97fb +lucene = 9.9.0-snapshot-6684da1908a bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 607721884ec1b..38113c66b90c4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From da728c58154cc6ba204ba7557fb8bddd1c30af7a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 7 Nov 2023 12:40:31 +0000 Subject: [PATCH 054/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-f7c1de55999 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b3e85b93f13dd..d5854f6fbe428 100644 --- a/build-tools-internal/version.properties 
+++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-6684da1908a +lucene = 9.9.0-snapshot-f7c1de55999 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 38113c66b90c4..966f453198006 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 4c099703a298e9c61673200f78fa0a365407bc46 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 9 Nov 2023 07:09:20 +0000 Subject: [PATCH 055/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-d9109907bca --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d5854f6fbe428..d04783eeaa845 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-f7c1de55999 +lucene = 9.9.0-snapshot-d9109907bca bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 966f453198006..2bbeb9bf5738b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 180ef285c066f82575fdc1e2595869bcdb2c5748 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 9 Nov 2023 14:17:07 +0100 Subject: [PATCH 056/181] Fix compile errors (#101874) IndexDiskUsageAnalyzer and IndexDiskUsageAnalyzerTests, as well as CompletionFieldMapper, CompletionFieldMapperTests and CompletionStatsCacheTests need adjusting after apache/lucene#12741 , to refer to the latest postings format. KuromojiTokenizerFactory needs adjusting after apache/lucene#12390 --- .../analysis/kuromoji/KuromojiTokenizerFactory.java | 2 +- .../admin/indices/diskusage/IndexDiskUsageAnalyzer.java | 7 ++++++- .../elasticsearch/index/mapper/CompletionFieldMapper.java | 2 +- .../indices/diskusage/IndexDiskUsageAnalyzerTests.java | 8 ++++---- .../index/engine/CompletionStatsCacheTests.java | 4 ++-- .../index/mapper/CompletionFieldMapperTests.java | 4 ++-- 6 files changed, 16 insertions(+), 11 deletions(-) diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java index 038af3c2357f9..d662003530c22 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java @@ -12,7 +12,7 @@ import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; import org.apache.lucene.analysis.ja.dict.UserDictionary; -import org.apache.lucene.analysis.ja.util.CSVUtil; +import org.apache.lucene.analysis.util.CSVUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index f232591a05a68..6587bf27f604a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.backward_codecs.lucene50.Lucene50PostingsFormat; import org.apache.lucene.backward_codecs.lucene84.Lucene84PostingsFormat; +import org.apache.lucene.backward_codecs.lucene90.Lucene90PostingsFormat; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.KnnVectorsReader; @@ -18,7 +19,7 @@ import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; -import org.apache.lucene.codecs.lucene90.Lucene90PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; @@ -301,6 +302,9 @@ private static void readProximity(Terms terms, PostingsEnum postings) throws IOE private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef term) throws IOException { if (term != null && termsEnum.seekExact(term)) { final TermState termState = termsEnum.termState(); + if (termState instanceof final Lucene99PostingsFormat.IntBlockTermState blockTermState) { + return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); + } if (termState instanceof final Lucene90PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, 
blockTermState.posStartFP, blockTermState.payStartFP); } @@ -310,6 +314,7 @@ private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef te if (termState instanceof final Lucene50PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); } + assert false : "unsupported postings format: " + termState; } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 2859d8bb29917..94b937c534491 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -370,7 +370,7 @@ public CompletionFieldType fieldType() { } static PostingsFormat postingsFormat() { - return PostingsFormat.forName("Completion90"); + return PostingsFormat.forName("Completion99"); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 57dbb1e73f7c5..dbbba6d325cd4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,9 +12,9 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene90.Lucene90PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; import 
org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; @@ -54,7 +54,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; @@ -330,7 +330,7 @@ public void testCompletionField() throws Exception { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { - return new Completion90PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); + return new Completion99PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); } else { return super.postingsFormat(); } @@ -642,7 +642,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire .setCodec(new Lucene99Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { - return new Lucene90PostingsFormat(); + return new Lucene99PostingsFormat(); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 96c38efed5b53..7c2c40e078cb4 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import 
org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.elasticsearch.ElasticsearchException; @@ -43,7 +43,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion90PostingsFormat(); + final PostingsFormat postingsFormat = new Completion99PostingsFormat(); indexWriterConfig.setCodec(new Lucene99Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 99302e377b61f..1f473d0ade35b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -149,7 +149,7 @@ public void testPostingsFormat() throws IOException { Codec codec = codecService.codec("default"); assertThat(codec, instanceOf(PerFieldMapperCodec.class)); PerFieldMapperCodec perFieldCodec = (PerFieldMapperCodec) codec; - 
assertThat(perFieldCodec.getPostingsFormatForField("field"), instanceOf(Completion90PostingsFormat.class)); + assertThat(perFieldCodec.getPostingsFormatForField("field"), instanceOf(Completion99PostingsFormat.class)); } public void testDefaultConfiguration() throws IOException { From 3f9ab8a3cbcd3c38c24914c78e5cb37747742f8a Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Thu, 9 Nov 2023 14:32:21 -0500 Subject: [PATCH 057/181] Adjust SortField comparators to use new Pruning API (#101983) Introduced in https://github.com/apache/lucene/pull/12405 We should account for the changes in our overrides and API. Now, to indicate that no skipping can occur, we utilize `Pruning.NONE`. --- .../action/search/BottomSortValuesCollector.java | 3 ++- .../fieldcomparator/BytesRefFieldComparatorSource.java | 5 +++-- .../fieldcomparator/DoubleValuesComparatorSource.java | 5 +++-- .../fieldcomparator/FloatValuesComparatorSource.java | 5 +++-- .../fieldcomparator/LongValuesComparatorSource.java | 5 +++-- .../lucene/grouping/SinglePassGroupingCollector.java | 3 ++- .../org/elasticsearch/lucene/grouping/TopFieldGroups.java | 3 ++- .../lucene/queries/SearchAfterSortedDocQuery.java | 3 ++- .../aggregations/bucket/composite/CompositeAggregator.java | 5 +++-- .../elasticsearch/search/sort/GeoDistanceSortBuilder.java | 5 +++-- .../org/elasticsearch/search/sort/ShardDocSortField.java | 5 +++-- .../action/search/BottomSortValuesCollectorTests.java | 3 ++- .../search/aggregations/metrics/InternalTopHitsTests.java | 3 ++- .../java/org/elasticsearch/search/query/QueryPhaseTests.java | 3 ++- .../search/searchafter/SearchAfterBuilderTests.java | 3 ++- 15 files changed, 37 insertions(+), 22 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java index 34566ec48ccad..4461b71be9047 100644 --- 
a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.elasticsearch.search.DocValueFormat; @@ -35,7 +36,7 @@ class BottomSortValuesCollector { this.reverseMuls = new int[sortFields.length]; this.sortFields = sortFields; for (int i = 0; i < sortFields.length; i++) { - comparators[i] = sortFields[i].getComparator(1, false); + comparators[i] = sortFields[i].getComparator(1, Pruning.NONE); reverseMuls[i] = sortFields[i].getReverse() ? -1 : 1; } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index addc6f33c9eba..2f80826c6cda0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.TermOrdValComparator; @@ -68,13 +69,13 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx protected void setScorer(Scorable scorer) {} @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String 
fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed; final BytesRef missingBytes = (BytesRef) missingObject(missingValue, reversed); if (indexFieldData instanceof IndexOrdinalsFieldData) { - return new TermOrdValComparator(numHits, null, sortMissingLast, reversed, false) { + return new TermOrdValComparator(numHits, null, sortMissingLast, reversed, Pruning.NONE) { @Override protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 76463807942a2..f717ff440570d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; @@ -72,13 +73,13 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, doubl protected void setScorer(Scorable scorer) {} @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final double dMissingValue = 
(Double) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new DoubleComparator(numHits, null, null, reversed, false) { + return new DoubleComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 4b8351f430e05..e071be6c2a9a0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -12,6 +12,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.FloatComparator; @@ -65,13 +66,13 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, float } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float fMissingValue = (Float) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we 
replace missing values in select() - return new FloatComparator(numHits, null, null, reversed, false) { + return new FloatComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new FloatLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 827e1618adde2..989b09700890b 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.LongComparator; import org.apache.lucene.util.BitSet; @@ -94,13 +95,13 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, long mis } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final long lMissingValue = (Long) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new LongComparator(numHits, null, null, reversed, false) { + return new LongComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public 
LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java index eaa49fceb4e63..b11a034ce4e4c 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; @@ -169,7 +170,7 @@ private SinglePassGroupingCollector( for (int i = 0; i < sortFields.length; i++) { final SortField sortField = sortFields[i]; // use topNGroups + 1 so we have a spare slot to use for comparing (tracked by this.spareSlot): - comparators[i] = sortField.getComparator(topNGroups + 1, false); + comparators[i] = sortField.getComparator(topNGroups + 1, Pruning.NONE); reversed[i] = sortField.getReverse() ? 
-1 : 1; } if (after != null) { diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java index 39c807119c481..8e5efa8a880b7 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java @@ -9,6 +9,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -121,7 +122,7 @@ private static class MergeSortQueue extends PriorityQueue { reverseMul = new int[sortFields.length]; for (int compIDX = 0; compIDX < sortFields.length; compIDX++) { final SortField sortField = sortFields[compIDX]; - comparators[compIDX] = sortField.getComparator(1, false); + comparators[compIDX] = sortField.getComparator(1, Pruning.NONE); reverseMul[compIDX] = sortField.getReverse() ? 
-1 : 1; } } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index 1bf6a1cd4f76c..c5802f092c033 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; @@ -52,7 +53,7 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { this.reverseMuls = new int[numFields]; for (int i = 0; i < numFields; i++) { SortField sortField = sort.getSort()[i]; - FieldComparator fieldComparator = sortField.getComparator(1, false); + FieldComparator fieldComparator = sortField.getComparator(1, Pruning.NONE); @SuppressWarnings("unchecked") FieldComparator comparator = (FieldComparator) fieldComparator; comparator.setTopValue(after.fields[i]); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index dff95332d3f16..1e8f2dbac33b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; 
import org.apache.lucene.search.Sort; @@ -359,8 +360,8 @@ public int hashCode() { } @Override - public FieldComparator getComparator(int numHits, boolean enableSkipping) { - return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), false) { + public FieldComparator getComparator(int numHits, Pruning enableSkipping) { + return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 2dceca2e9ad65..d53d3d2d637c9 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; import org.apache.lucene.util.BitSet; @@ -663,8 +664,8 @@ private NumericDoubleValues getNumericDoubleValues(LeafReaderContext context) th } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { - return new DoubleComparator(numHits, null, null, reversed, false) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { + return new DoubleComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git 
a/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java b/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java index 58fd3029c0105..9cb554f560d84 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DocComparator; @@ -34,8 +35,8 @@ int getShardRequestIndex() { } @Override - public FieldComparator getComparator(int numHits, boolean enableSkipping) { - final DocComparator delegate = new DocComparator(numHits, getReverse(), false); + public FieldComparator getComparator(int numHits, Pruning enableSkipping) { + final DocComparator delegate = new DocComparator(numHits, getReverse(), Pruning.NONE); return new FieldComparator() { @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java b/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java index 31f3fe7066bed..4305d0af9a7c1 100644 --- a/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; @@ -234,7 +235,7 @@ private Object[] newDateNanoArray(String... 
values) { private TopFieldDocs createTopDocs(SortField sortField, int totalHits, Object[] values) { FieldDoc[] fieldDocs = new FieldDoc[values.length]; @SuppressWarnings("unchecked") - FieldComparator cmp = (FieldComparator) sortField.getComparator(1, false); + FieldComparator cmp = (FieldComparator) sortField.getComparator(1, Pruning.NONE); for (int i = 0; i < values.length; i++) { fieldDocs[i] = new FieldDoc(i, Float.NaN, new Object[] { values[i] }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 35fe9c400888c..7d3799b2db35d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; @@ -367,7 +368,7 @@ private Comparator sortFieldsComparator(SortField[] sortFields) { FieldComparator[] comparators = new FieldComparator[sortFields.length]; for (int i = 0; i < sortFields.length; i++) { // Values passed to getComparator shouldn't matter - comparators[i] = sortFields[i].getComparator(0, false); + comparators[i] = sortFields[i].getComparator(0, Pruning.NONE); } return (lhs, rhs) -> { FieldDoc l = (FieldDoc) lhs; diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 9569bd982363e..59360b2d2013a 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -44,6 +44,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ScoreDoc; @@ -720,7 +721,7 @@ public void testIndexSortScrollOptimization() throws Exception { @SuppressWarnings("unchecked") FieldComparator comparator = (FieldComparator) searchSortAndFormat.sort.getSort()[i].getComparator( 1, - i == 0 + i == 0 ? Pruning.GREATER_THAN : Pruning.NONE ); int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]); if (cmp == 0) { diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 74c4b991ff401..ff963835f55f6 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; @@ -216,7 +217,7 @@ public SortField.Type reducedType() { } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { return null; } From 2247cb80a905922a89c52568f6b110dab5a928fa Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 10 Nov 2023 07:08:28 +0000 Subject: 
[PATCH 058/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-9a0245333ff --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d04783eeaa845..728740897f72c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-d9109907bca +lucene = 9.9.0-snapshot-9a0245333ff bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2bbeb9bf5738b..b21cc16639aa5 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From df125f2423440cb18ee6642cf854a748b4ee14c3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 12 Nov 2023 07:08:55 +0000 Subject: [PATCH 059/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-448e6112954 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 728740897f72c..f59cb6bf876fe 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-9a0245333ff +lucene = 9.9.0-snapshot-448e6112954 bundled_jdk_vendor = openjdk bundled_jdk = 
21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b21cc16639aa5..f6d84dc8f8298 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e3fc167bb8994c26d82802ce70830d267a02499a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 13 Nov 2023 07:09:17 +0000 Subject: [PATCH 060/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-448e6112954 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f6d84dc8f8298..127597284c632 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2641,122 +2641,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 6c4479db24e295c8d8987ee67ffa23a89218b023 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 13 Nov 2023 15:56:16 +0100 Subject: [PATCH 061/181] Resolve compile error in DenseVectorFieldMapper (#102066) * Resolve compile error in DenseVectorFieldMapper A change in Lucene99HnswVectorsFormat requires that we adapt our code, see https://github.com/apache/lucene/pull/12729 * Add new Lucene file extensions * Fixing format name check --------- Co-authored-by: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> --- .../index/mapper/vectors/DenseVectorFieldMapper.java | 7 +------ .../elasticsearch/index/store/LuceneFilesExtensions.java | 5 ++++- .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 2 +- 3 files changed, 6 insertions(+), 8 
deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 5e89a25fe2eb2..fc3570c443aca 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -12,7 +12,6 @@ import org.apache.lucene.codecs.KnnVectorsReader; import org.apache.lucene.codecs.KnnVectorsWriter; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -1087,11 +1086,7 @@ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultForm format = defaultFormat; } else { HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene99HnswVectorsFormat( - hnswIndexOptions.m, - hnswIndexOptions.efConstruction, - new Lucene99ScalarQuantizedVectorsFormat() - ); + format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction); } // It's legal to reuse the same format name as this is the same on-disk format. 
return new KnnVectorsFormat(format.getName()) { diff --git a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java index 7504f8983b87e..463ff90b47870 100644 --- a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java +++ b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java @@ -76,7 +76,10 @@ public enum LuceneFilesExtensions { // kNN vectors format VEC("vec", "Vector Data", false, true), VEX("vex", "Vector Index", false, true), - VEM("vem", "Vector Metadata", true, false); + VEM("vem", "Vector Metadata", true, false), + VEMF("vemf", "Flat Vector Metadata", true, false), + VEMQ("vemq", "Scalar Quantized Vector Metadata", true, false), + VEQ("veq", "Scalar Quantized Vector Data", false, true); /** * Allow plugin developers of custom codecs to opt out of the assertion in {@link #fromExtension} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index d61960cfc0f51..6c71a43e714fe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -977,7 +977,7 @@ public void testKnnVectorsFormat() throws IOException { + m + ", beamWidth=" + efConstruction - + ", quantizer=Lucene99ScalarQuantizedVectorsFormat(name=Lucene99ScalarQuantizedVectorsFormat, quantile=null)" + + ", flatVectorFormat=Lucene99FlatVectorsFormat()" + ")"; assertEquals(expectedString, knnVectorsFormat.toString()); } From dbd5cfc1beb6346469890ff10778a07472a51309 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 14 Nov 2023 07:08:56 +0000 Subject: [PATCH 062/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-0eda40a371b --- 
build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f59cb6bf876fe..2993f7dd9bf4b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-448e6112954 +lucene = 9.9.0-snapshot-0eda40a371b bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 127597284c632..9a0a9bbfe92f9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 440450ad7692a776c095bd284748fd9361e931f2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 15 Nov 2023 07:08:36 +0000 Subject: [PATCH 063/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-910c721e065 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 2993f7dd9bf4b..4286d1a41b850 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-0eda40a371b +lucene = 9.9.0-snapshot-910c721e065 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index 9a0a9bbfe92f9..6edb5c6f03d0e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 30ab3b5f7daccbedf6793e773e545dd6fcb4c2bf Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 15 Nov 2023 09:26:14 -0500 Subject: [PATCH 064/181] Fix failing tests due to new query to string format (#102186) There have been some changes around range query toString format. This commit adjusts tests expecting particular outputs from the previous Lucene version --- .../extras/ScaledFloatFieldTypeTests.java | 28 ++++++++++--------- .../validate/SimpleValidateQueryIT.java | 5 +++- .../index/mapper/RangeFieldTypeTests.java | 4 +-- .../geo/GeoDistanceQueryBuilderTestCase.java | 4 +-- 4 files changed, 23 insertions(+), 18 deletions(-) diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java index 603b19623a0e7..222f0f05d548d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java @@ -35,6 +35,8 @@ import java.util.Collections; import java.util.List; +import static org.hamcrest.Matchers.containsString; + public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { @@ -136,35 +138,35 @@ public void testRangeQuery() throws IOException { public void testRoundsUpperBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new 
ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(null, 0.1, true, false, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 9]")); scaledFloatQ = ft.rangeQuery(null, 0.1, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 10]")); scaledFloatQ = ft.rangeQuery(null, 0.095, true, false, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 9]")); scaledFloatQ = ft.rangeQuery(null, 0.095, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 9]")); scaledFloatQ = ft.rangeQuery(null, 0.105, true, false, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 10]")); scaledFloatQ = ft.rangeQuery(null, 0.105, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 10]")); scaledFloatQ = ft.rangeQuery(null, 79.99, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 7999]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 7999]")); } public void testRoundsLowerBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new 
ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(-0.1, null, false, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.1, null, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-10 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.095, null, false, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.095, null, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.105, null, false, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-10 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.105, null, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-10 TO 9223372036854775807]")); } public void testValueForSearch() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index afb86bd175973..27fa53481edb7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -219,7 +219,10 @@ public void testExplainDateRangeInQueryString() { long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000; long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1; - assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]")); + assertThat( + response.getQueryExplanation().get(0).getExplanation(), + containsString("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]") + ); assertThat(response.isValid(), equalTo(true)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 5fe3711b1d034..1602e76c1a5fd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -233,12 +233,12 @@ public void testDateRangeQueryUsingMappingFormat() { RangeFieldType fieldType = new RangeFieldType("field", formatter); final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); - assertEquals("field:", query.toString()); + assertThat(query.toString(), containsString("field:")); // compare lower and upper bounds with what we would get on a `date` field DateFieldType dateFieldType = new DateFieldType("field", DateFieldMapper.Resolution.MILLISECONDS, formatter); final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); - assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString()); + assertThat(queryOnDateField.toString(), containsString("field:[1465975790000 TO 1466062190999]")); } /** diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java index c9520bcfd051e..3866a57761fef 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java @@ -325,9 +325,9 @@ private void assertGeoDistanceRangeQuery(String query, double lat, double lon, d // so we cannot access its fields directly to check and have to use toString() here instead. double qLat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); double qLon = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon)); - assertEquals( + assertThat( parsedQuery.toString(), - "mapped_geo_point:" + qLat + "," + qLon + " +/- " + distanceUnit.toMeters(distance) + " meters" + containsString("mapped_geo_point:" + qLat + "," + qLon + " +/- " + distanceUnit.toMeters(distance) + " meters") ); } From fc87985f9b112ad8a45f88f25bf1cfe2a4d5e32a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 16 Nov 2023 07:09:14 +0000 Subject: [PATCH 065/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b13e4a121ab --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4286d1a41b850..3da18cd611e78 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-910c721e065 +lucene = 9.9.0-snapshot-b13e4a121ab bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 
6edb5c6f03d0e..7f2c0d2ca3c90 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 47f19f56f61d3d69eba74447036c12554f936512 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 17 Nov 2023 07:09:10 +0000 Subject: [PATCH 066/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b13e4a121ab --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fc8e3e4a115f9..5fb16c67e0caa 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2641,122 +2641,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 6e361631001b10fd5c2fafea19927b4edd2c4a8c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 18 Nov 2023 07:08:58 +0000 Subject: [PATCH 067/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-2e8dfac07e2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 3da18cd611e78..a988ea573b4af 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-b13e4a121ab +lucene = 9.9.0-snapshot-2e8dfac07e2 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index 5fb16c67e0caa..1f59b87ff24f6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 61891b42cb2cecc3a088b33cac9da4c7502db6d7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 19 Nov 2023 07:08:36 +0000 Subject: [PATCH 068/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-85e4deab437 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a988ea573b4af..9359d4e68708b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-2e8dfac07e2 +lucene = 9.9.0-snapshot-85e4deab437 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1f59b87ff24f6..f8a4cc3986946 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0b27968705fc47a12261385dfa13946d01edfe6a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 20 Nov 2023 07:09:25 +0000 Subject: [PATCH 069/181] [Automated] Update Lucene snapshot 
to 9.9.0-snapshot-85e4deab437 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f8a4cc3986946..68581a6f04a0b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2641,122 +2641,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 94ba92259222b096fe61fbd4dabcc0e033ea34eb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 21 Nov 2023 07:08:53 +0000 Subject: [PATCH 070/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-6cd78318eab --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9359d4e68708b..7450cad8fb9c2 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-85e4deab437 +lucene = 9.9.0-snapshot-6cd78318eab bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 68581a6f04a0b..168e29ff3ef35 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d4f01fc7b32e87f87426357be8a3f674fa48b6c2 Mon Sep 17 00:00:00 2001 From: Saikat Sarkar <132922331+saikatsarkar056@users.noreply.github.com> Date: Tue, 21 Nov 2023 12:16:21 -0700 
Subject: [PATCH 071/181] Gather vector_operation count for knn search (#102032) --- docs/changelog/102032.yaml | 5 ++ docs/reference/search/profile.asciidoc | 3 +- .../rest-api-spec/test/search/370_profile.yml | 66 +++++++++++++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../vectors/DenseVectorFieldMapper.java | 14 ++-- .../elasticsearch/search/dfs/DfsPhase.java | 6 ++ .../search/profile/Profilers.java | 3 +- .../profile/SearchProfileDfsPhaseResult.java | 3 +- .../search/profile/dfs/DfsProfiler.java | 7 +- .../query/QueryProfileShardResult.java | 28 +++++++- .../search/profile/query/QueryProfiler.java | 10 +++ ...iversifyingChildrenByteKnnVectorQuery.java | 41 ++++++++++++ ...versifyingChildrenFloatKnnVectorQuery.java | 41 ++++++++++++ .../vectors/ProfilingKnnByteVectorQuery.java | 34 ++++++++++ .../vectors/ProfilingKnnFloatVectorQuery.java | 34 ++++++++++ .../search/vectors/ProfilingQuery.java | 27 ++++++++ .../query/QueryProfileShardResultTests.java | 4 +- ...AbstractKnnVectorQueryBuilderTestCase.java | 14 ++-- 18 files changed, 320 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/102032.yaml create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingDiversifyingChildrenByteKnnVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingDiversifyingChildrenFloatKnnVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingKnnByteVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingKnnFloatVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingQuery.java diff --git a/docs/changelog/102032.yaml b/docs/changelog/102032.yaml new file mode 100644 index 0000000000000..40463b9f252b9 --- /dev/null +++ b/docs/changelog/102032.yaml @@ -0,0 +1,5 @@ +pr: 102032 +summary: Add vector_operation_count in profile output for knn searches +area: Vector Search 
+type: enhancement +issues: [] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 52dfb91475c53..5b63929934770 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1272,6 +1272,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "dfs" : { "knn" : [ { + "vector_operations_count" : 4, "query" : [ { "type" : "DocAndScoreQuery", @@ -1321,7 +1322,7 @@ In the `dfs.knn` portion of the response we can see the output the of timings for <>, <>, and <>. Unlike many other queries, kNN search does the bulk of the work during the query rewrite. This means -`rewrite_time` represents the time spent on kNN search. +`rewrite_time` represents the time spent on kNN search. The attribute `vector_operations_count` represents the overall count of vector operations performed during the kNN search. [[profiling-considerations]] ===== Profiling Considerations diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 38212ba59a51e..0ead7b87f8acf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -229,6 +229,72 @@ dfs knn vector profiling: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } +--- +dfs knn vector profiling with vector_operations_count: + - skip: + version: ' - 8.11.99' + reason: vector_operations_count in dfs profiling added in 8.12.0 + + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, 
-20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { hits.total.value: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } + - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } + - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.match_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.match: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.shallow_advance_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.shallow_advance: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.next_doc_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.next_doc: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.score_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.score: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.compute_max_score_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.compute_max_score: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.build_scorer_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.build_scorer: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.rewrite_time: 0 } + - match: { profile.shards.0.dfs.knn.0.collector.0.name: "SimpleTopScoreDocCollector" } + - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } + - gt: { 
profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } + + --- dfs profile for search with dfs_query_then_fetch: - skip: diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5ad1d43c0d4f8..0e340f2336415 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -173,6 +173,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_OPENAI_ADDED = def(8_542_00_0); public static final TransportVersion SHUTDOWN_MIGRATION_STATUS_INCLUDE_COUNTS = def(8_543_00_0); public static final TransportVersion TRANSFORM_GET_CHECKPOINT_QUERY_AND_CLUSTER_ADDED = def(8_544_00_0); + public static final TransportVersion VECTOR_OPS_COUNT_ADDED = def(8_545_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index fc3570c443aca..bd598b29e3717 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -27,11 +27,9 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.KnnByteVectorQuery; -import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.DiversifyingChildrenByteKnnVectorQuery; -import org.apache.lucene.search.join.DiversifyingChildrenFloatKnnVectorQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -53,6 +51,10 @@ import 
org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.vectors.ProfilingDiversifyingChildrenByteKnnVectorQuery; +import org.elasticsearch.search.vectors.ProfilingDiversifyingChildrenFloatKnnVectorQuery; +import org.elasticsearch.search.vectors.ProfilingKnnByteVectorQuery; +import org.elasticsearch.search.vectors.ProfilingKnnFloatVectorQuery; import org.elasticsearch.search.vectors.VectorSimilarityQuery; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -905,12 +907,12 @@ public Query createKnnQuery( bytes[i] = (byte) queryVector[i]; } yield parentFilter != null - ? new DiversifyingChildrenByteKnnVectorQuery(name(), bytes, filter, numCands, parentFilter) - : new KnnByteVectorQuery(name(), bytes, numCands, filter); + ? new ProfilingDiversifyingChildrenByteKnnVectorQuery(name(), bytes, filter, numCands, parentFilter) + : new ProfilingKnnByteVectorQuery(name(), bytes, numCands, filter); } case FLOAT -> parentFilter != null - ? new DiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) - : new KnnFloatVectorQuery(name(), queryVector, numCands, filter); + ? 
new ProfilingDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) + : new ProfilingKnnFloatVectorQuery(name(), queryVector, numCands, filter); }; if (similarityThreshold != null) { diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index 66ccae1746197..5d3288408c99b 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.vectors.KnnSearchBuilder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; +import org.elasticsearch.search.vectors.ProfilingQuery; import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; @@ -215,6 +216,11 @@ static DfsKnnResults singleKnnSearch(Query knnQuery, int k, Profilers profilers, CollectorResult.REASON_SEARCH_TOP_HITS ); topDocs = searcher.search(knnQuery, ipcm); + + if (knnQuery instanceof ProfilingQuery profilingQuery) { + profilingQuery.profile(knnProfiler); + } + knnProfiler.setCollectorResult(ipcm.getCollectorTree()); } // Set profiler back after running KNN searches diff --git a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java index 2cc29d654ec86..44ad9be7e1e94 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java +++ b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java @@ -65,7 +65,8 @@ public SearchProfileQueryPhaseResult buildQueryPhaseResults() { QueryProfileShardResult result = new QueryProfileShardResult( queryProfiler.getTree(), queryProfiler.getRewriteTime(), - queryProfiler.getCollectorResult() + queryProfiler.getCollectorResult(), + null ); AggregationProfileShardResult aggResults = new 
AggregationProfileShardResult(aggProfiler.getTree()); return new SearchProfileQueryPhaseResult(Collections.singletonList(result), aggResults); diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java index 4e301d5a3300d..5f8e6a893c1b5 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java @@ -148,7 +148,8 @@ QueryProfileShardResult combineQueryProfileShardResults() { return new QueryProfileShardResult( profileResults, totalRewriteTime, - new CollectorResult("KnnQueryCollector", CollectorResult.REASON_SEARCH_MULTI, totalCollectionTime, subCollectorResults) + new CollectorResult("KnnQueryCollector", CollectorResult.REASON_SEARCH_MULTI, totalCollectionTime, subCollectorResults), + null ); } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java b/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java index 72104aea8a9b8..0ef4704fa1894 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java +++ b/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java @@ -68,7 +68,12 @@ public SearchProfileDfsPhaseResult buildDfsPhaseResults() { final List queryProfileShardResult = new ArrayList<>(knnQueryProfilers.size()); for (QueryProfiler queryProfiler : knnQueryProfilers) { queryProfileShardResult.add( - new QueryProfileShardResult(queryProfiler.getTree(), queryProfiler.getRewriteTime(), queryProfiler.getCollectorResult()) + new QueryProfileShardResult( + queryProfiler.getTree(), + queryProfiler.getRewriteTime(), + queryProfiler.getCollectorResult(), + queryProfiler.getVectorOpsCount() + ) ); } return new SearchProfileDfsPhaseResult(dfsProfileResult, queryProfileShardResult); diff --git 
a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java index 6c9f1edd6c583..e779152890541 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java @@ -8,10 +8,12 @@ package org.elasticsearch.search.profile.query; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -35,17 +37,27 @@ public final class QueryProfileShardResult implements Writeable, ToXContentObjec public static final String REWRITE_TIME = "rewrite_time"; public static final String QUERY_ARRAY = "query"; + public static final String VECTOR_OPERATIONS_COUNT = "vector_operations_count"; + private final List queryProfileResults; private final CollectorResult profileCollector; private final long rewriteTime; - public QueryProfileShardResult(List queryProfileResults, long rewriteTime, CollectorResult profileCollector) { + private final Long vectorOperationsCount; + + public QueryProfileShardResult( + List queryProfileResults, + long rewriteTime, + CollectorResult profileCollector, + @Nullable Long vectorOperationsCount + ) { assert (profileCollector != null); this.queryProfileResults = queryProfileResults; this.profileCollector = profileCollector; this.rewriteTime = rewriteTime; + this.vectorOperationsCount = vectorOperationsCount; } /** @@ -60,6 +72,9 @@ public QueryProfileShardResult(StreamInput in) throws IOException { profileCollector = new 
CollectorResult(in); rewriteTime = in.readLong(); + vectorOperationsCount = (in.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) + ? in.readOptionalLong() + : null; } @Override @@ -70,6 +85,9 @@ public void writeTo(StreamOutput out) throws IOException { } profileCollector.writeTo(out); out.writeLong(rewriteTime); + if (out.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) { + out.writeOptionalLong(vectorOperationsCount); + } } public List getQueryResults() { @@ -87,6 +105,9 @@ public CollectorResult getCollectorResult() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + if (vectorOperationsCount != null) { + builder.field(VECTOR_OPERATIONS_COUNT, vectorOperationsCount); + } builder.startArray(QUERY_ARRAY); for (ProfileResult p : queryProfileResults) { p.toXContent(builder, params); @@ -127,6 +148,7 @@ public static QueryProfileShardResult fromXContent(XContentParser parser) throws String currentFieldName = null; List queryProfileResults = new ArrayList<>(); long rewriteTime = 0; + Long vectorOperationsCount = null; CollectorResult collector = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -134,6 +156,8 @@ public static QueryProfileShardResult fromXContent(XContentParser parser) throws } else if (token.isValue()) { if (REWRITE_TIME.equals(currentFieldName)) { rewriteTime = parser.longValue(); + } else if (VECTOR_OPERATIONS_COUNT.equals(currentFieldName)) { + vectorOperationsCount = parser.longValue(); } else { parser.skipChildren(); } @@ -153,6 +177,6 @@ public static QueryProfileShardResult fromXContent(XContentParser parser) throws parser.skipChildren(); } } - return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector); + return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector, vectorOperationsCount); } } diff 
--git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java index 8cfbecc14ecf5..a40b1284238b2 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java @@ -31,10 +31,20 @@ public final class QueryProfiler extends AbstractProfiler This interface includes the declaration of an abstract method, profile(). Classes implementing this interface + * must provide an implementation for profile() to store profiling information in the {@link QueryProfiler}. + */ + +public interface ProfilingQuery { + + /** + * Store the profiling information in the {@link QueryProfiler} + * @param queryProfiler an instance of {@link KnnFloatVectorField}. + */ + void profile(QueryProfiler queryProfiler); +} diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java index f8c8d38e92805..f28425172ead5 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java @@ -33,7 +33,9 @@ public static QueryProfileShardResult createTestItem() { if (randomBoolean()) { rewriteTime = rewriteTime % 1000; // make sure to often test this with small values too } - return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector); + + Long vectorOperationsCount = randomBoolean() ? 
null : randomNonNegativeLong(); + return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector, vectorOperationsCount); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java index 0bb170ed04430..474f891767081 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java @@ -10,8 +10,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.KnnByteVectorQuery; -import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; @@ -101,13 +99,13 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que Query knnQuery = ((VectorSimilarityQuery) query).getInnerKnnQuery(); assertThat(((VectorSimilarityQuery) query).getSimilarity(), equalTo(queryBuilder.getVectorSimilarity())); switch (elementType()) { - case FLOAT -> assertTrue(knnQuery instanceof KnnFloatVectorQuery); - case BYTE -> assertTrue(knnQuery instanceof KnnByteVectorQuery); + case FLOAT -> assertTrue(knnQuery instanceof ProfilingKnnFloatVectorQuery); + case BYTE -> assertTrue(knnQuery instanceof ProfilingKnnByteVectorQuery); } } else { switch (elementType()) { - case FLOAT -> assertTrue(query instanceof KnnFloatVectorQuery); - case BYTE -> assertTrue(query instanceof KnnByteVectorQuery); + case FLOAT -> assertTrue(query instanceof ProfilingKnnFloatVectorQuery); + case BYTE -> assertTrue(query instanceof ProfilingKnnByteVectorQuery); } } @@ -119,13 +117,13 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que Query filterQuery = 
booleanQuery.clauses().isEmpty() ? null : booleanQuery; // The field should always be resolved to the concrete field Query knnVectorQueryBuilt = switch (elementType()) { - case BYTE -> new KnnByteVectorQuery( + case BYTE -> new ProfilingKnnByteVectorQuery( VECTOR_FIELD, getByteQueryVector(queryBuilder.queryVector()), queryBuilder.numCands(), filterQuery ); - case FLOAT -> new KnnFloatVectorQuery(VECTOR_FIELD, queryBuilder.queryVector(), queryBuilder.numCands(), filterQuery); + case FLOAT -> new ProfilingKnnFloatVectorQuery(VECTOR_FIELD, queryBuilder.queryVector(), queryBuilder.numCands(), filterQuery); }; if (query instanceof VectorSimilarityQuery vectorSimilarityQuery) { query = vectorSimilarityQuery.getInnerKnnQuery(); From adfd7f8cea388705ab7512c9be0ba4509357512b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 22 Nov 2023 07:09:32 +0000 Subject: [PATCH 072/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-175031da6ae --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7450cad8fb9c2..69d3ad5aa5e17 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-6cd78318eab +lucene = 9.9.0-snapshot-175031da6ae bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 168e29ff3ef35..557954d2f0f12 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From c2bc2fb2a55301de2086af14a263cf2e17a8eadf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 23 Nov 2023 07:09:40 +0000 Subject: [PATCH 073/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-1138a4064e2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 69d3ad5aa5e17..16fd63380b2a1 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-175031da6ae +lucene = 9.9.0-snapshot-1138a4064e2 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 557954d2f0f12..08bb26d001c92 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 581b6ed4085980fb585786469d14edf33f977c22 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 24 Nov 2023 07:09:51 +0000 Subject: [PATCH 074/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ea8b6476ed3 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 16fd63380b2a1..53533a4d4c6d7 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-1138a4064e2 +lucene = 9.9.0-snapshot-ea8b6476ed3 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 08bb26d001c92..2b4c16529a6ed 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 954dee6f692666b90059e84d7eed2dabcccfd8ef Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 24 Nov 2023 15:11:07 +0100 Subject: [PATCH 075/181] Introduce transport version for the next lucene upgrade (#102587) --- .../main/java/org/elasticsearch/TransportVersions.java | 8 +++----- .../search/profile/query/QueryProfileShardResult.java | 4 ++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 631b501d0ce90..37547ca3ee3ce 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -178,11 +178,9 @@ static TransportVersion def(int id) { public static final TransportVersion GRANT_API_KEY_CLIENT_AUTHENTICATION_ADDED = def(8_545_00_0); public static final TransportVersion PIT_WITH_INDEX_FILTER = def(8_546_00_0); public static final TransportVersion NODE_INFO_VERSION_AS_STRING = def(8_547_00_0); - /* - * Transport versions added for features that require the next lucene minor version. - * Their id needs to be adjusted prior to merging lucene_snapshot into main. 
- */ - public static final TransportVersion VECTOR_OPS_COUNT_ADDED = def(8_900_00_0); + + // Placeholder for features that require the next lucene version. Its id needs to be adjusted when merging lucene_snapshot into main. + public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_900_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java index e779152890541..1b799983dd0a4 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java @@ -72,7 +72,7 @@ public QueryProfileShardResult(StreamInput in) throws IOException { profileCollector = new CollectorResult(in); rewriteTime = in.readLong(); - vectorOperationsCount = (in.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) + vectorOperationsCount = (in.getTransportVersion().onOrAfter(TransportVersions.UPGRADE_TO_LUCENE_9_9)) ? 
in.readOptionalLong() : null; } @@ -85,7 +85,7 @@ public void writeTo(StreamOutput out) throws IOException { } profileCollector.writeTo(out); out.writeLong(rewriteTime); - if (out.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.UPGRADE_TO_LUCENE_9_9)) { out.writeOptionalLong(vectorOperationsCount); } } From fd8cbb6fd21dab139badd2a2888dd7a7dfef13da Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 25 Nov 2023 07:08:59 +0000 Subject: [PATCH 076/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-02677650e19 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f5694e349d1db..87efaaff84c6b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ea8b6476ed3 +lucene = 9.9.0-snapshot-02677650e19 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 73712f1697d12..b3b29ae69ceee 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a183379b76886cfb4076efebd381bee874043af4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 26 Nov 2023 07:09:07 +0000 Subject: [PATCH 077/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-c367ee3ea1a --- build-tools-internal/version.properties | 
2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 87efaaff84c6b..326882aa2da26 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-02677650e19 +lucene = 9.9.0-snapshot-c367ee3ea1a bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b3b29ae69ceee..72a0b727e709e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 74d3748a92be5c5c204a542ca45a6341305547b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 27 Nov 2023 07:09:12 +0000 Subject: [PATCH 078/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-c367ee3ea1a --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 72a0b727e709e..cb86d33c6ff57 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From c3b47a38b4d55ac4ea440aa61b697baf0cf6d76c Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 27 Nov 2023 14:04:34 +0100 Subject: [PATCH 079/181] spotless --- server/src/main/java/org/elasticsearch/TransportVersions.java | 1 - 1 file changed, 1 
deletion(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 9cc0cdf5212fe..4e813eb2b5224 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -180,7 +180,6 @@ static TransportVersion def(int id) { public static final TransportVersion NODE_INFO_VERSION_AS_STRING = def(8_547_00_0); public static final TransportVersion GET_API_KEY_INVALIDATION_TIME_ADDED = def(8_548_00_0); - // Placeholder for features that require the next lucene version. Its id needs to be adjusted when merging lucene_snapshot into main. public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_900_00_0); From 96b20f28cd58c3d521c07755b4695af75837dec6 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 27 Nov 2023 14:06:35 -0500 Subject: [PATCH 080/181] Fix IndexDiskUsageAnalyzerTests with vectors (#102320) --- .../admin/indices/diskusage/IndexDiskUsageAnalyzer.java | 9 ++++++++- .../indices/diskusage/IndexDiskUsageAnalyzerTests.java | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 6587bf27f604a..17b28ebbe3b4b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -532,7 +532,6 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I for (FieldInfo field : reader.getFieldInfos()) { cancellationChecker.checkForCancellation(); directory.resetBytesRead(); - final KnnCollector collector = new TopKnnCollector(100, Integer.MAX_VALUE); if (field.getVectorDimension() > 0) { switch 
(field.getVectorEncoding()) { case BYTE -> { @@ -543,6 +542,10 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I // do a couple of randomized searches to figure out min and max offsets of index file ByteVectorValues vectorValues = vectorReader.getByteVectorValues(field.name); + final KnnCollector collector = new TopKnnCollector( + Math.max(1, Math.min(100, vectorValues.size() - 1)), + Integer.MAX_VALUE + ); int numDocsToVisit = reader.maxDoc() < 10 ? reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { @@ -562,6 +565,10 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I // do a couple of randomized searches to figure out min and max offsets of index file FloatVectorValues vectorValues = vectorReader.getFloatVectorValues(field.name); + final KnnCollector collector = new TopKnnCollector( + Math.max(1, Math.min(100, vectorValues.size() - 1)), + Integer.MAX_VALUE + ); int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index dbbba6d325cd4..6c79946cce15f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -709,7 +709,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats stats.addStoredField("_all_stored_fields", bytes); case TVX, TVD -> stats.addTermVectors("_all_vectors_fields", bytes); case NVD, NVM -> stats.addNorms("_all_norms_fields", bytes); - case VEM, VEC, VEX -> stats.addKnnVectors(fieldLookup.getVectorsField(file), bytes); + case VEM, VEMF, VEC, VEX, VEQ, VEMQ -> stats.addKnnVectors(fieldLookup.getVectorsField(file), bytes); } } } finally { From 129a30b7982aedb2fb9acd0a16c5af90c815c8c3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 28 Nov 2023 07:08:17 +0000 Subject: [PATCH 081/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-41da5c0b6a9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 326882aa2da26..8a1a4f392f653 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-c367ee3ea1a +lucene = 9.9.0-snapshot-41da5c0b6a9 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git 
a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cb86d33c6ff57..d71182bbdeea1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 8502d7a3ad5549ee9609b543b95c6595fe10f718 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 29 Nov 2023 07:09:22 +0000 Subject: [PATCH 082/181] [Automated] Update Lucene snapshot to 9.9.0-snapshot-a6d788e1138 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 8a1a4f392f653..575d8310e9e24 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-41da5c0b6a9 +lucene = 9.9.0-snapshot-a6d788e1138 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d71182bbdeea1..15920b437ee9e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From f00364aefd09d5c59ef5199218d0d46abf05c151 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 29 Nov 2023 12:29:55 -0500 Subject: [PATCH 083/181] Add 
byte quantization for float vectors in HNSW (#102093) Adds new `quantization_options` to `dense_vector`. This allows for vectors to be automatically quantized to `byte` when indexed. Example: ``` PUT vectors { "mappings": { "properties": { "my_vector": { "type": "dense_vector", "index": true, "index_options": { "type": "int8_hnsw" } } } } } ``` When querying, the query vector is automatically quantized and used when querying the HNSW graph. This reduces the memory required to only `25%` of what was previously required for `float` vectors at a slight loss of accuracy. This is currently only available when `index: true` and when using `hnsw` --- docs/reference/how-to/knn-search.asciidoc | 15 +- .../mapping/types/dense-vector.asciidoc | 47 ++- .../search-your-data/knn-search.asciidoc | 110 +++++- .../41_knn_search_byte_quantized.yml | 366 ++++++++++++++++++ .../vectors/DenseVectorFieldMapper.java | 153 ++++++-- .../vectors/DenseVectorFieldMapperTests.java | 79 +++- 6 files changed, 733 insertions(+), 37 deletions(-) create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml diff --git a/docs/reference/how-to/knn-search.asciidoc b/docs/reference/how-to/knn-search.asciidoc index 330847f5806de..066008ce26110 100644 --- a/docs/reference/how-to/knn-search.asciidoc +++ b/docs/reference/how-to/knn-search.asciidoc @@ -52,7 +52,12 @@ of datasets and configurations that we use for our nightly benchmarks. include::search-speed.asciidoc[tag=warm-fs-cache] The following file extensions are used for the approximate kNN search: -"vec" (for vector values), "vex" (for HNSW graph), "vem" (for metadata). ++ +-- +* `vec` and `veq` for vector values +* `vex` for HNSW graph +* `vem`, `vemf`, and `vemq` for metadata +-- [discrete] === Reduce vector dimensionality @@ -66,6 +71,14 @@ reduction techniques like PCA. 
When experimenting with different approaches, it's important to measure the impact on relevance to ensure the search quality is still acceptable. +[discrete] +=== Reduce vector memory foot-print + +The default <> is `float`. But this can be +automatically quantized during index time through <>. Quantization will +reduce the required memory by 4x, but it will also reduce the precision of the vectors. For `float` vectors with +`dim` greater than or equal to `384`, using a <> index is highly recommended. + [discrete] === Exclude vector fields from `_source` diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index 446e6c8ea4c43..a2ab44a173a62 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -111,6 +111,36 @@ PUT my-index-2 efficient kNN search. Like most kNN algorithms, HNSW is an approximate method that sacrifices result accuracy for improved speed. +[[dense-vector-quantization]] +==== Automatically quantize vectors for kNN search + +The `dense_vector` type supports quantization to reduce the memory footprint required when <> `float` vectors. +Currently the only quantization method supported is `int8` and provided vectors `element_type` must be `float`. To use +a quantized index, you can set your index type to `int8_hnsw`. + +When using the `int8_hnsw` index, each of the `float` vectors' dimensions are quantized to 1-byte integers. This can +reduce the memory footprint by as much as 75% at the cost of some accuracy. However, the disk usage can increase by +25% due to the overhead of storing the quantized and raw vectors. 
+ +[source,console] +-------------------------------------------------- +PUT my-byte-quantized-index +{ + "mappings": { + "properties": { + "my_vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "index_options": { + "type": "int8_hnsw" + } + } + } + } +} +-------------------------------------------------- + [role="child_attributes"] [[dense-vector-params]] ==== Parameters for dense vector fields @@ -198,8 +228,7 @@ a distinct set of options. An optional section that configures the kNN indexing algorithm. The HNSW algorithm has two internal parameters that influence how the data structure is built. These can be adjusted to improve the accuracy of results, at the -expense of slower indexing speed. When `index_options` is provided, all of its -properties must be defined. +expense of slower indexing speed. + ^*^ This parameter can only be specified when `index` is `true`. + @@ -209,17 +238,25 @@ properties must be defined. ==== `type`::: (Required, string) -The type of kNN algorithm to use. Currently only `hnsw` is supported. +The type of kNN algorithm to use. Can be either `hnsw` or `int8_hnsw`. `m`::: -(Required, integer) +(Optional, integer) The number of neighbors each node will be connected to in the HNSW graph. Defaults to `16`. `ef_construction`::: -(Required, integer) +(Optional, integer) The number of candidates to track while assembling the list of nearest neighbors for each new node. Defaults to `100`. + +`confidence_interval`::: +(Optional, float) +Only applicable to `int8_hnsw` index types. The confidence interval to use when quantizing the vectors, +can be any value between and including `0.90` and `1.0`. This value restricts the values used when calculating +the quantization thresholds. For example, a value of `0.95` will only use the middle 95% of the values when +calculating the quantization thresholds (e.g. the highest and lowest 2.5% of values will be ignored). +Defaults to `1/(dims + 1)`. 
==== [[dense-vector-synthetic-source]] diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index c39719f1a3b61..ff64535c705d9 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -242,6 +242,114 @@ POST byte-image-index/_search // TEST[s/"k": 10/"k": 3/] // TEST[s/"num_candidates": 100/"num_candidates": 3/] +[discrete] +[[knn-search-quantized-example]] +==== Byte quantized kNN search + +If you want to provide `float` vectors, but want the memory savings of `byte` vectors, you can use the +<> feature. Quantization allows you to provide `float` vectors, but +internally they are indexed as `byte` vectors. Additionally, the original `float` vectors are still retained +in the index. + +To use quantization, you can use the index type `int8_hnsw` object in the `dense_vector` mapping. + +[source,console] +---- +PUT quantized-image-index +{ + "mappings": { + "properties": { + "image-vector": { + "type": "dense_vector", + "element_type": "float", + "dims": 2, + "index": true, + "index_options": { + "type": "int8_hnsw" + } + }, + "title": { + "type": "text" + } + } + } +} +---- +// TEST[continued] + +. Index your `float` vectors. ++ +[source,console] +---- +POST quantized-image-index/_bulk?refresh=true +{ "index": { "_id": "1" } } +{ "image-vector": [0.1, -2], "title": "moose family" } +{ "index": { "_id": "2" } } +{ "image-vector": [0.75, -1], "title": "alpine lake" } +{ "index": { "_id": "3" } } +{ "image-vector": [1.2, 0.1], "title": "full moon" } +---- +//TEST[continued] + +. Run the search using the <>. When searching, the `float` vector is +automatically quantized to a `byte` vector. 
++ +[source,console] +---- +POST quantized-image-index/_search +{ + "knn": { + "field": "image-vector", + "query_vector": [0.1, -2], + "k": 10, + "num_candidates": 100 + }, + "fields": [ "title" ] +} +---- +// TEST[continued] +// TEST[s/"k": 10/"k": 3/] +// TEST[s/"num_candidates": 100/"num_candidates": 3/] + +Since the original `float` vectors are still retained in the index, you can optionally use them for re-scoring. Meaning, +you can search over all the vectors quickly using the `int8_hnsw` index and then rescore only the top `k` results. This +provides the best of both worlds, fast search and accurate scoring. + +[source,console] +---- +POST quantized-image-index/_search +{ + "knn": { + "field": "image-vector", + "query_vector": [0.1, -2], + "k": 15, + "num_candidates": 100 + }, + "fields": [ "title" ], + "rescore": { + "window_size": 10, + "query": { + "rescore_query": { + "script_score": { + "query": { + "match_all": {} + }, + "script": { + "source": "cosineSimilarity(params.query_vector, 'image-vector') + 1.0", + "params": { + "query_vector": [0.1, -2] + } + } + } + } + } + } +} +---- +// TEST[continued] +// TEST[s/"k": 15/"k": 3/] +// TEST[s/"num_candidates": 100/"num_candidates": 3/] + [discrete] [[knn-search-filter-example]] ==== Filtered kNN search @@ -903,7 +1011,7 @@ the global top `k` matches across shards. You cannot set the To run an exact kNN search, use a `script_score` query with a vector function. . Explicitly map one or more `dense_vector` fields. If you don't intend to use -the field for approximate kNN, set the `index` mapping option to `false`. This +the field for approximate kNN, set the `index` mapping option to `false`. This can significantly improve indexing speed. 
+ [source,console] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml new file mode 100644 index 0000000000000..f700664c43fc1 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -0,0 +1,366 @@ +setup: + - skip: + version: ' - 8.11.99' + reason: 'kNN float to byte quantization added in 8.12' + - do: + indices.create: + index: hnsw_byte_quantized + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: int8_hnsw + another_vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: int8_hnsw + + - do: + index: + index: hnsw_byte_quantized + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + another_vector: [130.0, 115.0, -1.02, 15.555, -100.0] + + - do: + index: + index: hnsw_byte_quantized + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + another_vector: [-0.5, 50.0, -1, 1, 120] + + - do: + index: + index: hnsw_byte_quantized + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + another_vector: [-0.5, 11.0, 0, 12, 111.0] + + - do: + indices.refresh: {} + +--- +"kNN search only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - match: {hits.hits.1._id: "3"} + - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: 
[-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 111.0], k: 2, num_candidates: 3} + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} +--- +"kNN search plus query": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0.fields.name.0: "cow.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + + - match: {hits.hits.2._id: "3"} + - match: {hits.hits.2.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search with query": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: [-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 111.0], k: 2, num_candidates: 3} + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "1"} + - match: {hits.hits.1.fields.name.0: "cow.jpg"} + + - match: {hits.hits.2._id: "2"} + - match: {hits.hits.2.fields.name.0: "moose.jpg"} +--- +"kNN search with filter": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + term: + name: "rabbit.jpg" + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + - 
term: + name: "rabbit.jpg" + - term: + _id: 2 + + - match: {hits.total.value: 0} + +--- +"KNN Vector similarity search only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 10.3 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} +--- +"Vector similarity with filter only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 11 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "moose.jpg"}} + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 110 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "cow.jpg"}} + + - length: {hits.hits: 0} +--- +"Knn search with mip": + - do: + indices.create: + index: mip + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: max_inner_product + index_options: + type: int8_hnsw + + - do: + index: + index: mip + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + + - do: + index: + index: mip + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + + - do: + index: + index: mip + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - do: + indices.refresh: {} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + + - length: {hits.hits: 3} + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.1._id: "3"} + - match: 
{hits.hits.2._id: "2"} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: { "term": { "name": "moose.jpg" } } + + + + - length: {hits.hits: 1} + - match: {hits.hits.0._id: "2"} +--- +"Cosine similarity with indexed vector": + - skip: + features: "headers" + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "cosineSimilarity(params.query_vector, 'vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "3"} + - gte: {hits.hits.0._score: 0.999} + - lte: {hits.hits.0._score: 1.001} + + - match: {hits.hits.1._id: "2"} + - gte: {hits.hits.1._score: 0.998} + - lte: {hits.hits.1._score: 1.0} + + - match: {hits.hits.2._id: "1"} + - gte: {hits.hits.2._score: 0.78} + - lte: {hits.hits.2._score: 0.791} +--- +"Test bad quantization parameters": + - do: + catch: bad_request + indices.create: + index: bad_hnsw_quantized + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + element_type: byte + index: true + index_options: + type: int8_hnsw + + - do: + catch: bad_request + indices.create: + index: bad_hnsw_quantized + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + index: false + index_options: + type: int8_hnsw diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index bd598b29e3717..dde2bcf06b0c7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import 
org.apache.lucene.codecs.KnnVectorsReader; import org.apache.lucene.codecs.KnnVectorsWriter; +import org.apache.lucene.codecs.lucene99.Lucene99HnswScalarQuantizedVectorsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; @@ -68,6 +69,7 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Stream; @@ -173,6 +175,13 @@ public Builder(String name, IndexVersion indexVersionCreated) { } } }); + this.indexOptions.addValidator(v -> { + if (v instanceof Int8HnswIndexOptions && elementType.getValue() == ElementType.BYTE) { + throw new IllegalArgumentException( + "[element_type] cannot be [byte] when using index type [" + VectorIndexType.INT8_HNSW.name + "]" + ); + } + }); } @Override @@ -702,26 +711,124 @@ private abstract static class IndexOptions implements ToXContent { IndexOptions(String type) { this.type = type; } + + abstract KnnVectorsFormat getVectorsFormat(); } - private static class HnswIndexOptions extends IndexOptions { + private enum VectorIndexType { + HNSW("hnsw") { + @Override + public IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { + Object mNode = indexOptionsMap.remove("m"); + Object efConstructionNode = indexOptionsMap.remove("ef_construction"); + if (mNode == null) { + mNode = Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; + } + if (efConstructionNode == null) { + efConstructionNode = Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; + } + int m = XContentMapValues.nodeIntegerValue(mNode); + int efConstruction = XContentMapValues.nodeIntegerValue(efConstructionNode); + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new HnswIndexOptions(m, efConstruction); + } + }, + INT8_HNSW("int8_hnsw") { + @Override + public IndexOptions 
parseIndexOptions(String fieldName, Map indexOptionsMap) { + Object mNode = indexOptionsMap.remove("m"); + Object efConstructionNode = indexOptionsMap.remove("ef_construction"); + Object confidenceIntervalNode = indexOptionsMap.remove("confidence_interval"); + if (mNode == null) { + mNode = Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; + } + if (efConstructionNode == null) { + efConstructionNode = Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; + } + int m = XContentMapValues.nodeIntegerValue(mNode); + int efConstruction = XContentMapValues.nodeIntegerValue(efConstructionNode); + Float confidenceInterval = null; + if (confidenceIntervalNode != null) { + confidenceInterval = (float) XContentMapValues.nodeDoubleValue(confidenceIntervalNode); + } + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new Int8HnswIndexOptions(m, efConstruction, confidenceInterval); + } + }; + + static Optional fromString(String type) { + return Stream.of(VectorIndexType.values()).filter(vectorIndexType -> vectorIndexType.name.equals(type)).findFirst(); + } + + private final String name; + + VectorIndexType(String name) { + this.name = name; + } + + abstract IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap); + } + + private static class Int8HnswIndexOptions extends IndexOptions { private final int m; private final int efConstruction; + private final Float confidenceInterval; - static IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { - Object mNode = indexOptionsMap.remove("m"); - Object efConstructionNode = indexOptionsMap.remove("ef_construction"); - if (mNode == null) { - throw new MapperParsingException("[index_options] of type [hnsw] requires field [m] to be configured"); - } - if (efConstructionNode == null) { - throw new MapperParsingException("[index_options] of type [hnsw] requires field [ef_construction] to be configured"); + private Int8HnswIndexOptions(int m, int efConstruction, Float confidenceInterval) { + 
super("int8_hnsw"); + this.m = m; + this.efConstruction = efConstruction; + this.confidenceInterval = confidenceInterval; + } + + @Override + public KnnVectorsFormat getVectorsFormat() { + return new Lucene99HnswScalarQuantizedVectorsFormat(m, efConstruction, 1, confidenceInterval, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.field("m", m); + builder.field("ef_construction", efConstruction); + if (confidenceInterval != null) { + builder.field("confidence_interval", confidenceInterval); } - int m = XContentMapValues.nodeIntegerValue(mNode); - int efConstruction = XContentMapValues.nodeIntegerValue(efConstructionNode); - MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); - return new HnswIndexOptions(m, efConstruction); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Int8HnswIndexOptions that = (Int8HnswIndexOptions) o; + return m == that.m && efConstruction == that.efConstruction && Objects.equals(confidenceInterval, that.confidenceInterval); + } + + @Override + public int hashCode() { + return Objects.hash(m, efConstruction, confidenceInterval); + } + + @Override + public String toString() { + return "{type=" + + type + + ", m=" + + m + + ", ef_construction=" + + efConstruction + + ", confidence_interval=" + + confidenceInterval + + "}"; } + } + + private static class HnswIndexOptions extends IndexOptions { + private final int m; + private final int efConstruction; private HnswIndexOptions(int m, int efConstruction) { super("hnsw"); @@ -729,6 +836,11 @@ private HnswIndexOptions(int m, int efConstruction) { this.efConstruction = efConstruction; } + @Override + public KnnVectorsFormat getVectorsFormat() { + return new Lucene99HnswVectorsFormat(m, efConstruction, 1, null); + 
} + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -754,7 +866,7 @@ public int hashCode() { @Override public String toString() { - return "{type=" + type + ", m=" + m + ", ef_construction=" + efConstruction + " }"; + return "{type=" + type + ", m=" + m + ", ef_construction=" + efConstruction + "}"; } } @@ -1071,11 +1183,9 @@ private static IndexOptions parseIndexOptions(String fieldName, Object propNode) throw new MapperParsingException("[index_options] requires field [type] to be configured"); } String type = XContentMapValues.nodeStringValue(typeNode); - if (type.equals("hnsw")) { - return HnswIndexOptions.parseIndexOptions(fieldName, indexOptionsMap); - } else { - throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]"); - } + return VectorIndexType.fromString(type) + .orElseThrow(() -> new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]")) + .parseIndexOptions(fieldName, indexOptionsMap); } /** @@ -1083,12 +1193,11 @@ private static IndexOptions parseIndexOptions(String fieldName, Object propNode) * {@code null} if the default format should be used. */ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultFormat) { - KnnVectorsFormat format; + final KnnVectorsFormat format; if (indexOptions == null) { format = defaultFormat; } else { - HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction); + format = indexOptions.getVectorsFormat(); } // It's legal to reuse the same format name as this is the same on-disk format. 
return new KnnVectorsFormat(format.getName()) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 6c71a43e714fe..1e45ddaf9e8a7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -495,6 +495,11 @@ public void testInvalidParameters() { ); assertThat(e.getMessage(), containsString("[index_options] requires field [type] to be configured")); + e = expectThrows( + MapperParsingException.class, + () -> createDocumentMapper(fieldMapping(b -> b.field("type", "dense_vector").field("dims", 3).field("element_type", "foo"))) + ); + assertThat(e.getMessage(), containsString("invalid element_type [foo]; available types are ")); e = expectThrows( MapperParsingException.class, () -> createDocumentMapper( @@ -505,18 +510,35 @@ public void testInvalidParameters() { .field("index", true) .startObject("index_options") .field("type", "hnsw") - .field("ef_construction", 100) + .startObject("foo") + .endObject() .endObject() ) ) ); - assertThat(e.getMessage(), containsString("[index_options] of type [hnsw] requires field [m] to be configured")); - + assertThat( + e.getMessage(), + containsString("Failed to parse mapping: Mapping definition for [field] has unsupported parameters: [foo : {}]") + ); e = expectThrows( MapperParsingException.class, - () -> createDocumentMapper(fieldMapping(b -> b.field("type", "dense_vector").field("dims", 3).field("element_type", "bytes"))) + () -> createDocumentMapper( + fieldMapping( + b -> b.field("type", "dense_vector") + .field("dims", 3) + .field("element_type", "byte") + .field("similarity", "l2_norm") + .field("index", true) + .startObject("index_options") + .field("type", "int8_hnsw") + .endObject() + ) + ) + ); + assertThat( + 
e.getMessage(), + containsString("Failed to parse mapping: [element_type] cannot be [byte] when using index type [int8_hnsw]") ); - assertThat(e.getMessage(), containsString("invalid element_type [bytes]; available types are ")); } public void testInvalidParametersBeforeIndexedByDefault() { @@ -958,6 +980,8 @@ public void testFloatVectorQueryBoundaries() throws IOException { public void testKnnVectorsFormat() throws IOException { final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); + boolean setM = randomBoolean(); + boolean setEfConstruction = randomBoolean(); MapperService mapperService = createMapperService(fieldMapping(b -> { b.field("type", "dense_vector"); b.field("dims", 4); @@ -965,20 +989,59 @@ public void testKnnVectorsFormat() throws IOException { b.field("similarity", "dot_product"); b.startObject("index_options"); b.field("type", "hnsw"); + if (setM) { + b.field("m", m); + } + if (setEfConstruction) { + b.field("ef_construction", efConstruction); + } + b.endObject(); + })); + CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE); + Codec codec = codecService.codec("default"); + assertThat(codec, instanceOf(PerFieldMapperCodec.class)); + KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); + String expectedString = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, maxConn=" + + (setM ? m : DEFAULT_MAX_CONN) + + ", beamWidth=" + + (setEfConstruction ? 
efConstruction : DEFAULT_BEAM_WIDTH) + + ", flatVectorFormat=Lucene99FlatVectorsFormat()" + + ")"; + assertEquals(expectedString, knnVectorsFormat.toString()); + } + + public void testKnnQuantizedHNSWVectorsFormat() throws IOException { + final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); + final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); + boolean setConfidenceInterval = randomBoolean(); + float confidenceInterval = (float) randomDoubleBetween(0.90f, 1.0f, true); + MapperService mapperService = createMapperService(fieldMapping(b -> { + b.field("type", "dense_vector"); + b.field("dims", 4); + b.field("index", true); + b.field("similarity", "dot_product"); + b.startObject("index_options"); + b.field("type", "int8_hnsw"); b.field("m", m); b.field("ef_construction", efConstruction); + if (setConfidenceInterval) { + b.field("confidence_interval", confidenceInterval); + } b.endObject(); })); CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE); Codec codec = codecService.codec("default"); assertThat(codec, instanceOf(PerFieldMapperCodec.class)); KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); - String expectedString = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, maxConn=" + String expectedString = "Lucene99HnswScalarQuantizedVectorsFormat(name=Lucene99HnswScalarQuantizedVectorsFormat, maxConn=" + m + ", beamWidth=" + efConstruction - + ", flatVectorFormat=Lucene99FlatVectorsFormat()" - + ")"; + + ", flatVectorFormat=Lucene99ScalarQuantizedVectorsFormat(" + + "name=Lucene99ScalarQuantizedVectorsFormat, confidenceInterval=" + + (setConfidenceInterval ? 
confidenceInterval : null) + + ", rawVectorFormat=Lucene99FlatVectorsFormat()" + + "))"; assertEquals(expectedString, knnVectorsFormat.toString()); } From 0f394d06068765951b6babc583480c9b6964c4cd Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 29 Nov 2023 21:35:55 +0000 Subject: [PATCH 084/181] Use lucene 9.0.0 RC1 --- build-tools-internal/version.properties | 2 +- build.gradle | 5 + gradle/verification-metadata.xml | 142 +++++++++++------------- 3 files changed, 72 insertions(+), 77 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 575d8310e9e24..9763cef8aefeb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-a6d788e1138 +lucene = 9.9.0 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/build.gradle b/build.gradle index c0b613beefea4..4783868f4e0b2 100644 --- a/build.gradle +++ b/build.gradle @@ -195,6 +195,11 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' + + repositories { + // TODO: Temporary for Lucene RC builds. 
REMOVE + maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC1-rev-92a5e5b02e0e083126c4122f2b7a02426c21a037/lucene/maven" } + } } allprojects { diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d90d60bf701e1..5f2795e343162 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,114 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - - - - - - - - - - - + + + From 7b2479ce61f2a9f5b8c9e520237fa763f085a07e Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 29 Nov 2023 21:53:29 +0000 Subject: [PATCH 085/181] Fix gradle verification metadata --- gradle/verification-metadata.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5f2795e343162..9d6f8f21bc74a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,6 +2664,16 @@ + + + + + + + + + + From dc84d359dfaa485eb64f9e886657c009183d1467 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 29 Nov 2023 21:55:59 +0000 Subject: [PATCH 086/181] Update docs/changelog/102782.yaml --- docs/changelog/102782.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 docs/changelog/102782.yaml diff --git a/docs/changelog/102782.yaml b/docs/changelog/102782.yaml new file mode 100644 index 0000000000000..ed0a004765859 --- /dev/null +++ b/docs/changelog/102782.yaml @@ -0,0 +1,5 @@ +pr: 102782 +summary: Upgrade to Lucene 9.9.0 +area: Search +type: upgrade +issues: [] From 75c5b870b4eb68fe0049a2e5f5395c938d031c11 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 30 Nov 2023 09:13:46 +0000 Subject: [PATCH 087/181] Use a real 
UPGRADE_TO_LUCENE_9_9 TransportVersion id --- server/src/main/java/org/elasticsearch/TransportVersions.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index cdae87a0b9bd1..84445e9e0bb43 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -181,9 +181,7 @@ static TransportVersion def(int id) { public static final TransportVersion GET_API_KEY_INVALIDATION_TIME_ADDED = def(8_548_00_0); public static final TransportVersion ML_INFERENCE_GET_MULTIPLE_MODELS = def(8_549_00_0); public static final TransportVersion INFERENCE_SERVICE_RESULTS_ADDED = def(8_550_00_0); - - // Placeholder for features that require the next lucene version. Its id needs to be adjusted when merging lucene_snapshot into main. - public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_900_00_0); + public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_551_00_0); /* * STOP! READ THIS FIRST! 
No, really, From 12906cd8f33dae7c8d1169b3aa6d55f3f4bfe065 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 30 Nov 2023 09:21:53 +0000 Subject: [PATCH 088/181] Fix IndexVersion to use Lucene 9.9 --- .../src/main/java/org/elasticsearch/index/IndexVersions.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7c99764e44283..eb3a7dd075f9f 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -89,9 +89,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion NEW_SPARSE_VECTOR = def(8_500_001, Version.LUCENE_9_7_0); public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); - public static final IndexVersion ES_VERSION_8_12 = def(8_500_004, Version.LUCENE_9_8_0); - - public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = def(8_500_010, Version.LUCENE_9_9_0); + public static final IndexVersion UPGRADE_LUCENE_9_9 = def(8_500_004, Version.LUCENE_9_9_0); /* * STOP! READ THIS FIRST! No, really, From eaa5889c5c1ce1fd0b02634832e506cbf66470df Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Thu, 30 Nov 2023 13:30:58 -0500 Subject: [PATCH 089/181] Fixing byte quantized search test flakiness Quantized search result scores can be slightly different depending on if they are merged into a single segment, etc. 
--- .../test/search.vectors/41_knn_search_byte_quantized.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml index f700664c43fc1..12fb4d1bbcb1d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -257,7 +257,7 @@ setup: id: "2" body: name: moose.jpg - vector: [-0.5, 100.0, -13, 14.8, -156.0] + vector: [-0.5, 10.0, -13, 14.8, 15.0] - do: index: From d2584ecfa249fa6a0f3c5e98182f10876ec9e283 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 30 Nov 2023 18:57:48 +0000 Subject: [PATCH 090/181] Bump to lucene 9.9.0 RC2 --- build.gradle | 2 +- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/build.gradle b/build.gradle index 4783868f4e0b2..d10f836db4024 100644 --- a/build.gradle +++ b/build.gradle @@ -198,7 +198,7 @@ subprojects { proj -> repositories { // TODO: Temporary for Lucene RC builds. 
REMOVE - maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC1-rev-92a5e5b02e0e083126c4122f2b7a02426c21a037/lucene/maven" } + maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC2-rev-06070c0dceba07f0d33104192d9ac98ca16fc500/lucene/maven" } } } diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9d6f8f21bc74a..72422a28039f9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 36869d417d54e8e19ccf5ec613468059e55cd958 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Thu, 30 Nov 2023 16:21:44 -0500 Subject: [PATCH 091/181] Adding changelog for PR #102093 --- docs/changelog/102093.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 docs/changelog/102093.yaml diff --git a/docs/changelog/102093.yaml b/docs/changelog/102093.yaml new file mode 100644 index 0000000000000..f6922c0d36be6 --- /dev/null +++ b/docs/changelog/102093.yaml @@ -0,0 +1,14 @@ +pr: 102093 +summary: Add byte quantization for float vectors in HNSW +area: Vector Search +type: feature +issues: [] +highlight: + title: Add new `int8_hsnw` index type for int8 quantization for HNSW + body: |- + This commit adds a new index type called `int8_hnsw`. This index will + automatically quantized float32 values into int8 byte values. While + this increases disk usage by 25%, it reduces memory required for + fast HNSW search by 75%. Dramatically reducing the resource overhead + required for dense vector search. 
+ notable: true From b01fe5efe2522fe0f407ad843bf5bbf336c89a72 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Fri, 1 Dec 2023 09:53:18 -0500 Subject: [PATCH 092/181] Fix search.vectors/41_knn_search_byte_quantized/Knn search with mip flakiness --- .../test/search.vectors/41_knn_search_byte_quantized.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml index 12fb4d1bbcb1d..948a6e04a128b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -267,6 +267,13 @@ setup: name: rabbit.jpg vector: [0.5, 111.3, -13.0, 14.8, -156.0] + # We force merge into a single segment to make sure scores are more uniform + # Each segment can have a different quantization error, which can affect scores and mip is especially sensitive to this + - do: + indices.forcemerge: + index: mip + max_num_segments: 1 + - do: indices.refresh: {} From 7dc8a4b2c09239107fcc5e933d024b7318fa733a Mon Sep 17 00:00:00 2001 From: Tom Veasey Date: Fri, 1 Dec 2023 16:32:28 +0000 Subject: [PATCH 093/181] Relax KDE test assertion (#102878) Our assertion for approximate median was too stringent. Fixes #102876. 
--- .../org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java index 80d5a3ad71136..e4d30912050e3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java @@ -41,8 +41,8 @@ public void testCdfAndSf() { double median = kde.data()[kde.size() / 2]; KDE.ValueAndMagnitude cdf = kde.cdf(median); KDE.ValueAndMagnitude sf = kde.sf(median); - assertThat(cdf.value(), closeTo(0.5, 0.05)); - assertThat(sf.value(), closeTo(0.5, 0.05)); + assertThat(cdf.value(), closeTo(0.5, 0.1)); + assertThat(sf.value(), closeTo(0.5, 0.1)); } // Should approximately sum to 1.0 for some random data. From 09afa3602057dd9a447197df84d90c271507b41d Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 1 Dec 2023 18:03:06 +0100 Subject: [PATCH 094/181] ESQL: Make EvalBenchmarks executable again (#102854) --- .../compute/operator/EvalBenchmark.java | 12 ++-- .../xpack/esql/action/EsqlQueryResponse.java | 4 +- .../esql/enrich/EnrichLookupService.java | 4 +- .../function/scalar/conditional/Case.java | 4 +- .../function/scalar/multivalue/MvAvg.java | 4 +- .../function/scalar/multivalue/MvDedupe.java | 4 +- .../function/scalar/multivalue/MvMax.java | 4 +- .../function/scalar/multivalue/MvMedian.java | 4 +- .../function/scalar/multivalue/MvMin.java | 4 +- .../function/scalar/multivalue/MvSum.java | 4 +- .../function/scalar/nulls/Coalesce.java | 4 +- .../esql/optimizer/LogicalPlanOptimizer.java | 4 +- .../esql/plan/physical/EstimatesRowSize.java | 4 +- .../AbstractPhysicalOperationProviders.java | 2 +- .../esql/planner/LocalExecutionPlanner.java | 58 +------------------ .../xpack/esql/planner/PlannerUtils.java | 58 
+++++++++++++++++++ .../esql/action/EsqlQueryResponseTests.java | 4 +- .../function/AbstractFunctionTestCase.java | 4 +- 18 files changed, 95 insertions(+), 91 deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 56a20594f1e6c..3a1142ad87d2f 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -27,11 +27,11 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; -import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; @@ -61,6 +61,11 @@ public class EvalBenchmark { private static final int BLOCK_LENGTH = 8 * 1024; + static final DriverContext driverContext = new DriverContext( + BigArrays.NON_RECYCLING_INSTANCE, + BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE) + ); + static { // Smoke test all the expected values and force loading subclasses more like prod try { @@ -72,11 +77,6 @@ public class EvalBenchmark { } } - static final DriverContext driverContext = new DriverContext( - BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getInstance(new NoopCircuitBreaker("noop"), 
BigArrays.NON_RECYCLING_INSTANCE) - ); - @Param({ "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending" }) public String operation; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index b283231574540..e571713420950 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -45,7 +45,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.versionfield.Version; @@ -317,7 +317,7 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef */ private static Page valuesToPage(List dataTypes, List> values) { List results = dataTypes.stream() - .map(c -> LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size())) + .map(c -> PlannerUtils.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size())) .toList(); for (List row : values) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 8dc5bdaeca393..0f8fd70c3016b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -72,7 +72,7 @@ import 
org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; @@ -262,7 +262,7 @@ private void doLookup( List fields = new ArrayList<>(extractFields.size()); for (int i = 0; i < extractFields.size(); i++) { NamedExpression extractField = extractFields.get(i); - final ElementType elementType = LocalExecutionPlanner.toElementType(extractField.dataType()); + final ElementType elementType = PlannerUtils.toElementType(extractField.dataType()); mergingTypes[i] = elementType; var loaders = BlockReaderFactories.loaders( List.of(searchContext), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index caef1fe0de627..0174eca9c1ddf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -17,7 +17,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -155,7 +155,7 @@ public Object fold() { @Override public 
ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - ElementType resultType = LocalExecutionPlanner.toElementType(dataType()); + ElementType resultType = PlannerUtils.toElementType(dataType()); List conditionsFactories = conditions.stream() .map(c -> new ConditionEvaluatorSupplier(toEvaluator.apply(c.condition), toEvaluator.apply(c.value))) .toList(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 0a6a5d50ee552..296229bab9b5a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -12,7 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -45,7 +45,7 @@ public DataType dataType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvAvgDoubleEvaluator.Factory(fieldEval); case INT -> new MvAvgIntEvaluator.Factory(fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index bda8faa62f7af..7d9b40ad0d24f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -11,7 +11,7 @@ import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -43,7 +43,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return MultivalueDedupe.evaluator(LocalExecutionPlanner.toElementType(dataType()), fieldEval); + return MultivalueDedupe.evaluator(PlannerUtils.toElementType(dataType()), fieldEval); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index e404e4d9151f9..fafd8d6a584fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import 
org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -47,7 +47,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toSortableElementType(field().dataType())) { + return switch (PlannerUtils.toSortableElementType(field().dataType())) { case BOOLEAN -> new MvMaxBooleanEvaluator.Factory(fieldEval); case BYTES_REF -> new MvMaxBytesRefEvaluator.Factory(fieldEval); case DOUBLE -> new MvMaxDoubleEvaluator.Factory(fieldEval); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index e10cbdd86a072..b60885967264c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -14,7 +14,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -44,7 +44,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch 
(LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvMedianDoubleEvaluator.Factory(fieldEval); case INT -> new MvMedianIntEvaluator.Factory(fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index de78e52a19eb6..1ae2ef41191b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -47,7 +47,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toSortableElementType(field().dataType())) { + return switch (PlannerUtils.toSortableElementType(field().dataType())) { case BOOLEAN -> new MvMinBooleanEvaluator.Factory(fieldEval); case BYTES_REF -> new MvMinBytesRefEvaluator.Factory(fieldEval); case DOUBLE -> new MvMinDoubleEvaluator.Factory(fieldEval); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index f543a8ec3878b..a0abced909c48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -12,7 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -39,7 +39,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvSumDoubleEvaluator.Factory(fieldEval); case INT -> new MvSumIntEvaluator.Factory(source(), fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index ea95971e1b7b6..43d4fff9c486d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -16,7 +16,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; 
import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -127,7 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function x.get(context)).toList() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 3ae19ceef4d08..66654b78c3af4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -26,7 +26,7 @@ import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -664,7 +664,7 @@ private static List aggsFromEmpty(List aggs) { // fill the boolean block later in LocalExecutionPlanner if (dataType != DataTypes.BOOLEAN) { // look for count(literal) with literal != null - var wrapper = BlockUtils.wrapperFor(blockFactory, LocalExecutionPlanner.toElementType(dataType), 1); + var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(dataType), 1); if (aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null)) { wrapper.accept(0L); } else { diff 
--git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java index 643d99696c80a..b79d7cc0fbdde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java @@ -10,7 +10,7 @@ import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -103,7 +103,7 @@ public String toString() { } static int estimateSize(DataType dataType) { - ElementType elementType = LocalExecutionPlanner.toElementType(dataType); + ElementType elementType = PlannerUtils.toElementType(dataType); return switch (elementType) { case BOOLEAN -> 1; case BYTES_REF -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 113e4b91232ae..a7d2c6cec50ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -301,7 +301,7 @@ HashAggregationOperator.GroupSpec toHashGroupSpec() { } ElementType elementType() { - return LocalExecutionPlanner.toElementType(attribute.dataType()); + return PlannerUtils.toElementType(attribute.dataType()); } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 0c4e10e91cb29..3d377497e17af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -78,7 +78,6 @@ import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; @@ -87,7 +86,6 @@ import org.elasticsearch.xpack.ql.expression.NameId; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Holder; @@ -273,58 +271,6 @@ private PhysicalOperation planFieldExtractNode(LocalExecutionPlannerContext cont return physicalOperationProviders.fieldExtractPhysicalOperation(fieldExtractExec, plan(fieldExtractExec.child(), context)); } - /** - * Map QL's {@link DataType} to the compute engine's {@link ElementType}. 
- */ - public static ElementType toElementType(DataType dataType) { - if (dataType == DataTypes.LONG || dataType == DataTypes.DATETIME || dataType == DataTypes.UNSIGNED_LONG) { - return ElementType.LONG; - } - if (dataType == DataTypes.INTEGER) { - return ElementType.INT; - } - if (dataType == DataTypes.DOUBLE) { - return ElementType.DOUBLE; - } - // unsupported fields are passed through as a BytesRef - if (dataType == DataTypes.KEYWORD - || dataType == DataTypes.TEXT - || dataType == DataTypes.IP - || dataType == DataTypes.SOURCE - || dataType == DataTypes.VERSION - || dataType == DataTypes.UNSUPPORTED) { - return ElementType.BYTES_REF; - } - if (dataType == DataTypes.NULL) { - return ElementType.NULL; - } - if (dataType == DataTypes.BOOLEAN) { - return ElementType.BOOLEAN; - } - if (dataType == EsQueryExec.DOC_DATA_TYPE) { - return ElementType.DOC; - } - if (dataType == EsqlDataTypes.GEO_POINT) { - return ElementType.LONG; - } - if (dataType == EsqlDataTypes.CARTESIAN_POINT) { - return ElementType.LONG; - } - throw EsqlIllegalArgumentException.illegalDataType(dataType); - } - - /** - * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only. - * This specifically excludes GEO_POINT and CARTESIAN_POINT, which are backed by DataType.LONG - * but are not themselves sortable (the long can be sorted, but the sort order is not usually useful). 
- */ - public static ElementType toSortableElementType(DataType dataType) { - if (dataType == EsqlDataTypes.GEO_POINT || dataType == EsqlDataTypes.CARTESIAN_POINT) { - return ElementType.UNKNOWN; - } - return toElementType(dataType); - } - private PhysicalOperation planOutput(OutputExec outputExec, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(outputExec.child(), context); var output = outputExec.output(); @@ -422,7 +368,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte TopNEncoder[] encoders = new TopNEncoder[source.layout.numberOfChannels()]; List inverse = source.layout.inverse(); for (int channel = 0; channel < inverse.size(); channel++) { - elementTypes[channel] = toElementType(inverse.get(channel).type()); + elementTypes[channel] = PlannerUtils.toElementType(inverse.get(channel).type()); encoders[channel] = switch (inverse.get(channel).type().typeName()) { case "ip" -> TopNEncoder.IP; case "text", "keyword" -> TopNEncoder.UTF8; @@ -519,7 +465,7 @@ private PhysicalOperation planGrok(GrokExec grok, LocalExecutionPlannerContext c ElementType[] types = new ElementType[extractedFields.size()]; for (int i = 0; i < extractedFields.size(); i++) { Attribute extractedField = extractedFields.get(i); - ElementType type = toElementType(extractedField.dataType()); + ElementType type = PlannerUtils.toElementType(extractedField.dataType()); fieldToPos.put(extractedField.name(), i); fieldToType.put(extractedField.name(), type); types[i] = type; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 201f3365b78a7..adf684d573cd1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -8,13 +8,16 @@ package 
org.elasticsearch.xpack.esql.planner; import org.elasticsearch.common.Strings; +import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer; +import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; @@ -24,12 +27,15 @@ import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.AttributeSet; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Holder; import org.elasticsearch.xpack.ql.util.Queries; @@ -164,4 +170,56 @@ static QueryBuilder detectFilter(PhysicalPlan plan, String fieldName) { return Queries.combine(FILTER, asList(requestFilter)); } + + /** + * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only. 
+ * This specifically excludes GEO_POINT and CARTESIAN_POINT, which are backed by DataType.LONG + * but are not themselves sortable (the long can be sorted, but the sort order is not usually useful). + */ + public static ElementType toSortableElementType(DataType dataType) { + if (dataType == EsqlDataTypes.GEO_POINT || dataType == EsqlDataTypes.CARTESIAN_POINT) { + return ElementType.UNKNOWN; + } + return toElementType(dataType); + } + + /** + * Map QL's {@link DataType} to the compute engine's {@link ElementType}. + */ + public static ElementType toElementType(DataType dataType) { + if (dataType == DataTypes.LONG || dataType == DataTypes.DATETIME || dataType == DataTypes.UNSIGNED_LONG) { + return ElementType.LONG; + } + if (dataType == DataTypes.INTEGER) { + return ElementType.INT; + } + if (dataType == DataTypes.DOUBLE) { + return ElementType.DOUBLE; + } + // unsupported fields are passed through as a BytesRef + if (dataType == DataTypes.KEYWORD + || dataType == DataTypes.TEXT + || dataType == DataTypes.IP + || dataType == DataTypes.SOURCE + || dataType == DataTypes.VERSION + || dataType == DataTypes.UNSUPPORTED) { + return ElementType.BYTES_REF; + } + if (dataType == DataTypes.NULL) { + return ElementType.NULL; + } + if (dataType == DataTypes.BOOLEAN) { + return ElementType.BOOLEAN; + } + if (dataType == EsQueryExec.DOC_DATA_TYPE) { + return ElementType.DOC; + } + if (dataType == EsqlDataTypes.GEO_POINT) { + return ElementType.LONG; + } + if (dataType == EsqlDataTypes.CARTESIAN_POINT) { + return ElementType.LONG; + } + throw EsqlIllegalArgumentException.illegalDataType(dataType); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index f040933e01410..25083268a3761 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -114,7 +114,7 @@ private EsqlQueryResponse.Profile randomProfile() { private Page randomPage(List columns) { return new Page(columns.stream().map(c -> { - Block.Builder builder = LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1, blockFactory); + Block.Builder builder = PlannerUtils.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1, blockFactory); switch (c.type()) { case "unsigned_long", "long" -> ((LongBlock.Builder) builder).appendLong(randomLong()); case "integer" -> ((IntBlock.Builder) builder).appendInt(randomInt()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 6e1b9487d1c9c..81f2fa98be8cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -37,7 +37,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.optimizer.FoldNull; import org.elasticsearch.xpack.esql.planner.Layout; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import 
org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; @@ -377,7 +377,7 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } try { for (int b = 0; b < data.size(); b++) { - ElementType elementType = LocalExecutionPlanner.toElementType(data.get(b).type()); + ElementType elementType = PlannerUtils.toElementType(data.get(b).type()); try (Block.Builder builder = elementType.newBlockBuilder(positions, inputBlockFactory)) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { From 85514b3d350cebb450edd010e8d71bd60074ecdb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 1 Dec 2023 18:24:29 +0100 Subject: [PATCH 095/181] Track rescorer type in search usage stats. (#102771) --- .../admin/cluster/stats/ClusterStatsIT.java | 20 ++++- .../org/elasticsearch/TransportVersions.java | 1 + .../admin/cluster/stats/SearchUsageStats.java | 28 ++++++- .../search/builder/SearchSourceBuilder.java | 4 +- .../search/rescore/RescorerBuilder.java | 4 +- .../org/elasticsearch/usage/SearchUsage.java | 15 ++++ .../usage/SearchUsageHolder.java | 7 ++ .../cluster/stats/SearchUsageStatsTests.java | 80 +++++++++++++++---- .../rescore/QueryRescorerBuilderTests.java | 46 +++++++++-- .../ClusterStatsMonitoringDocTests.java | 1 + 10 files changed, 173 insertions(+), 33 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 902f74ef778a0..93fc17a9a02eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -22,11 +22,13 @@ import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.monitor.os.OsStats; import org.elasticsearch.node.NodeRoleSettings; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -352,16 +354,26 @@ public void testSearchUsageStats() throws IOException { ); getRestClient().performRequest(request); } + { + Request request = new Request("GET", "/_search"); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(QueryBuilders.termQuery("field", "value")) + .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))); + request.setJsonEntity(Strings.toString(searchSourceBuilder)); + getRestClient().performRequest(request); + } SearchUsageStats stats = clusterAdmin().prepareClusterStats().get().getIndicesStats().getSearchUsageStats(); - assertEquals(5, stats.getTotalSearchCount()); + assertEquals(6, stats.getTotalSearchCount()); assertEquals(4, stats.getQueryUsage().size()); assertEquals(1, stats.getQueryUsage().get("match").longValue()); - assertEquals(2, stats.getQueryUsage().get("term").longValue()); + assertEquals(3, stats.getQueryUsage().get("term").longValue()); assertEquals(1, stats.getQueryUsage().get("range").longValue()); assertEquals(1, stats.getQueryUsage().get("bool").longValue()); - assertEquals(2, stats.getSectionsUsage().size()); - assertEquals(4, stats.getSectionsUsage().get("query").longValue()); + assertEquals(3, stats.getSectionsUsage().size()); + assertEquals(5, 
stats.getSectionsUsage().get("query").longValue()); assertEquals(1, stats.getSectionsUsage().get("aggs").longValue()); + assertEquals(1, stats.getSectionsUsage().get("rescore").longValue()); + assertEquals(1, stats.getRescorerUsage().size()); + assertEquals(1, stats.getRescorerUsage().get("query").longValue()); } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4627a3d907133..b6e204f3839f7 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -182,6 +182,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_GET_MULTIPLE_MODELS = def(8_549_00_0); public static final TransportVersion INFERENCE_SERVICE_RESULTS_ADDED = def(8_550_00_0); public static final TransportVersion ESQL_PROFILE = def(8_551_00_0); + public static final TransportVersion CLUSTER_STATS_RESCORER_USAGE_ADDED = def(8_552_00_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java index 372ca49a252c8..aa49e9f1ea01f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java @@ -21,6 +21,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.TransportVersions.CLUSTER_STATS_RESCORER_USAGE_ADDED; + /** * Holds a snapshot of the search usage statistics. 
* Used to hold the stats for a single node that's part of a {@link ClusterStatsNodeResponse}, as well as to @@ -29,6 +31,7 @@ public final class SearchUsageStats implements Writeable, ToXContentFragment { private long totalSearchCount; private final Map queries; + private final Map rescorers; private final Map sections; /** @@ -38,22 +41,27 @@ public SearchUsageStats() { this.totalSearchCount = 0L; this.queries = new HashMap<>(); this.sections = new HashMap<>(); + this.rescorers = new HashMap<>(); } /** * Creates a new stats instance with the provided info. The expectation is that when a new instance is created using * this constructor, the provided stats are final and won't be modified further. */ - public SearchUsageStats(Map queries, Map sections, long totalSearchCount) { + public SearchUsageStats(Map queries, Map rescorers, Map sections, long totalSearchCount) { this.totalSearchCount = totalSearchCount; this.queries = queries; this.sections = sections; + this.rescorers = rescorers; } public SearchUsageStats(StreamInput in) throws IOException { this.queries = in.readMap(StreamInput::readLong); this.sections = in.readMap(StreamInput::readLong); this.totalSearchCount = in.readVLong(); + this.rescorers = in.getTransportVersion().onOrAfter(CLUSTER_STATS_RESCORER_USAGE_ADDED) + ? 
in.readMap(StreamInput::readLong) + : Map.of(); } @Override @@ -61,6 +69,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(queries, StreamOutput::writeLong); out.writeMap(sections, StreamOutput::writeLong); out.writeVLong(totalSearchCount); + + if (out.getTransportVersion().onOrAfter(CLUSTER_STATS_RESCORER_USAGE_ADDED)) { + out.writeMap(rescorers, StreamOutput::writeLong); + } } /** @@ -68,6 +80,7 @@ public void writeTo(StreamOutput out) throws IOException { */ public void add(SearchUsageStats stats) { stats.queries.forEach((query, count) -> queries.merge(query, count, Long::sum)); + stats.rescorers.forEach((rescorer, count) -> rescorers.merge(rescorer, count, Long::sum)); stats.sections.forEach((query, count) -> sections.merge(query, count, Long::sum)); this.totalSearchCount += stats.totalSearchCount; } @@ -79,6 +92,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws { builder.field("queries"); builder.map(queries); + builder.field("rescorers"); + builder.map(rescorers); builder.field("sections"); builder.map(sections); } @@ -90,6 +105,10 @@ public Map getQueryUsage() { return Collections.unmodifiableMap(queries); } + public Map getRescorerUsage() { + return Collections.unmodifiableMap(rescorers); + } + public Map getSectionsUsage() { return Collections.unmodifiableMap(sections); } @@ -107,12 +126,15 @@ public boolean equals(Object o) { return false; } SearchUsageStats that = (SearchUsageStats) o; - return totalSearchCount == that.totalSearchCount && queries.equals(that.queries) && sections.equals(that.sections); + return totalSearchCount == that.totalSearchCount + && queries.equals(that.queries) + && rescorers.equals(that.rescorers) + && sections.equals(that.sections); } @Override public int hashCode() { - return Objects.hash(totalSearchCount, queries, sections); + return Objects.hash(totalSearchCount, queries, rescorers, sections); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 4fd20387004aa..c7077e4c867b0 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -1411,7 +1411,7 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr sorts = new ArrayList<>(SortBuilder.fromXContent(parser)); } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { rescoreBuilders = new ArrayList<>(); - rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser)); + rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage)); searchUsage.trackSectionUsage(RESCORE_FIELD.getPreferredName()); } else if (EXT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { extBuilders = new ArrayList<>(); @@ -1498,7 +1498,7 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { rescoreBuilders = new ArrayList<>(); while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser)); + rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage)); } searchUsage.trackSectionUsage(RESCORE_FIELD.getPreferredName()); } else if (STATS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java index 897c14409b5fd..76ee7e09ad870 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java @@ -22,6 +22,7 @@ import 
java.io.IOException; import java.util.Objects; +import java.util.function.Consumer; /** * The abstract base builder for instances of {@link RescorerBuilder}. @@ -67,7 +68,7 @@ public Integer windowSize() { return windowSize; } - public static RescorerBuilder parseFromXContent(XContentParser parser) throws IOException { + public static RescorerBuilder parseFromXContent(XContentParser parser, Consumer rescorerNameConsumer) throws IOException { String fieldName = null; RescorerBuilder rescorer = null; Integer windowSize = null; @@ -83,6 +84,7 @@ public static RescorerBuilder parseFromXContent(XContentParser parser) throws } } else if (token == XContentParser.Token.START_OBJECT) { rescorer = parser.namedObject(RescorerBuilder.class, fieldName, null); + rescorerNameConsumer.accept(fieldName); } else { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); } diff --git a/server/src/main/java/org/elasticsearch/usage/SearchUsage.java b/server/src/main/java/org/elasticsearch/usage/SearchUsage.java index 8c4e7a18269d6..f17dba1746cf8 100644 --- a/server/src/main/java/org/elasticsearch/usage/SearchUsage.java +++ b/server/src/main/java/org/elasticsearch/usage/SearchUsage.java @@ -17,6 +17,7 @@ */ public final class SearchUsage { private final Set queries = new HashSet<>(); + private final Set rescorers = new HashSet<>(); private final Set sections = new HashSet<>(); /** @@ -33,6 +34,13 @@ public void trackSectionUsage(String section) { sections.add(section); } + /** + * Track the usage of the provided rescorer + */ + public void trackRescorerUsage(String name) { + rescorers.add(name); + } + /** * Returns the query types that have been used at least once in the tracked search request */ @@ -40,6 +48,13 @@ public Set getQueryUsage() { return Collections.unmodifiableSet(queries); } + /** + * Returns the rescorer types that have been used at least once in the tracked search request + */ + public Set getRescorerUsage() { 
+ return Collections.unmodifiableSet(rescorers); + } + /** * Returns the search section names that have been used at least once in the tracked search request */ diff --git a/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java b/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java index 1ceaae4f8eb58..ef7d9b3c0a291 100644 --- a/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java +++ b/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java @@ -24,6 +24,7 @@ public final class SearchUsageHolder { private final LongAdder totalSearchCount = new LongAdder(); private final Map queriesUsage = new ConcurrentHashMap<>(); + private final Map rescorersUsage = new ConcurrentHashMap<>(); private final Map sectionsUsage = new ConcurrentHashMap<>(); SearchUsageHolder() {} @@ -39,6 +40,9 @@ public void updateUsage(SearchUsage searchUsage) { for (String query : searchUsage.getQueryUsage()) { queriesUsage.computeIfAbsent(query, q -> new LongAdder()).increment(); } + for (String rescorer : searchUsage.getRescorerUsage()) { + rescorersUsage.computeIfAbsent(rescorer, q -> new LongAdder()).increment(); + } } /** @@ -49,8 +53,11 @@ public SearchUsageStats getSearchUsageStats() { queriesUsage.forEach((query, adder) -> queriesUsageMap.put(query, adder.longValue())); Map sectionsUsageMap = Maps.newMapWithExpectedSize(sectionsUsage.size()); sectionsUsage.forEach((query, adder) -> sectionsUsageMap.put(query, adder.longValue())); + Map rescorersUsageMap = Maps.newMapWithExpectedSize(rescorersUsage.size()); + rescorersUsage.forEach((query, adder) -> rescorersUsageMap.put(query, adder.longValue())); return new SearchUsageStats( Collections.unmodifiableMap(queriesUsageMap), + Collections.unmodifiableMap(rescorersUsageMap), Collections.unmodifiableMap(sectionsUsageMap), totalSearchCount.longValue() ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java index 1e8bc1e17d525..10419719a5ed1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java @@ -8,9 +8,11 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; import java.util.HashMap; @@ -30,6 +32,8 @@ public class SearchUsageStatsTests extends AbstractWireSerializingTestCase RESCORER_TYPES = List.of("query", "learn_to_rank"); + private static final List SECTIONS = List.of( "highlight", "query", @@ -62,59 +66,105 @@ private static Map randomQueryUsage(int size) { return queryUsage; } + private static Map randomRescorerUsage(int size) { + Map rescorerUsage = new HashMap<>(); + while (rescorerUsage.size() < size) { + rescorerUsage.put(randomFrom(RESCORER_TYPES), randomLongBetween(1, Long.MAX_VALUE)); + } + return rescorerUsage; + } + @Override protected SearchUsageStats createTestInstance() { if (randomBoolean()) { return new SearchUsageStats(); } return new SearchUsageStats( - randomQueryUsage(randomIntBetween(0, 4)), - randomSectionsUsage(randomIntBetween(0, 4)), + randomQueryUsage(randomIntBetween(0, QUERY_TYPES.size())), + randomRescorerUsage(randomIntBetween(0, RESCORER_TYPES.size())), + randomSectionsUsage(randomIntBetween(0, SECTIONS.size())), randomLongBetween(10, Long.MAX_VALUE) ); } @Override protected SearchUsageStats mutateInstance(SearchUsageStats instance) { - if (randomBoolean()) { - return new SearchUsageStats( - randomQueryUsage(instance.getQueryUsage().size() + 1), + int i = randomInt(4); + return switch (i) { + case 0 -> 
new SearchUsageStats( + randomValueOtherThan(instance.getQueryUsage(), () -> randomQueryUsage(randomIntBetween(0, QUERY_TYPES.size()))), + instance.getRescorerUsage(), instance.getSectionsUsage(), instance.getTotalSearchCount() ); - } - if (randomBoolean()) { - return new SearchUsageStats( + case 1 -> new SearchUsageStats( instance.getQueryUsage(), - randomSectionsUsage(instance.getSectionsUsage().size() + 1), + randomValueOtherThan(instance.getRescorerUsage(), () -> randomRescorerUsage(randomIntBetween(0, RESCORER_TYPES.size()))), + instance.getSectionsUsage(), instance.getTotalSearchCount() ); - } - return new SearchUsageStats(instance.getQueryUsage(), instance.getSectionsUsage(), randomLongBetween(10, Long.MAX_VALUE)); + case 2 -> new SearchUsageStats( + instance.getQueryUsage(), + instance.getRescorerUsage(), + randomValueOtherThan(instance.getRescorerUsage(), () -> randomSectionsUsage(randomIntBetween(0, SECTIONS.size()))), + instance.getTotalSearchCount() + ); + default -> new SearchUsageStats( + instance.getQueryUsage(), + instance.getRescorerUsage(), + instance.getSectionsUsage(), + randomLongBetween(10, Long.MAX_VALUE) + ); + }; } public void testAdd() { SearchUsageStats searchUsageStats = new SearchUsageStats(); assertEquals(Map.of(), searchUsageStats.getQueryUsage()); + assertEquals(Map.of(), searchUsageStats.getRescorerUsage()); assertEquals(Map.of(), searchUsageStats.getSectionsUsage()); assertEquals(0, searchUsageStats.getTotalSearchCount()); - searchUsageStats.add(new SearchUsageStats(Map.of("match", 10L), Map.of("query", 10L), 10L)); + searchUsageStats.add(new SearchUsageStats(Map.of("match", 10L), Map.of("query", 5L), Map.of("query", 10L), 10L)); assertEquals(Map.of("match", 10L), searchUsageStats.getQueryUsage()); assertEquals(Map.of("query", 10L), searchUsageStats.getSectionsUsage()); + assertEquals(Map.of("query", 5L), searchUsageStats.getRescorerUsage()); assertEquals(10L, searchUsageStats.getTotalSearchCount()); - searchUsageStats.add(new 
SearchUsageStats(Map.of("term", 1L, "match", 1L), Map.of("query", 10L, "knn", 1L), 10L)); + searchUsageStats.add( + new SearchUsageStats( + Map.of("term", 1L, "match", 1L), + Map.of("query", 5L, "learn_to_rank", 2L), + Map.of("query", 10L, "knn", 1L), + 10L + ) + ); assertEquals(Map.of("match", 11L, "term", 1L), searchUsageStats.getQueryUsage()); assertEquals(Map.of("query", 20L, "knn", 1L), searchUsageStats.getSectionsUsage()); + assertEquals(Map.of("query", 10L, "learn_to_rank", 2L), searchUsageStats.getRescorerUsage()); assertEquals(20L, searchUsageStats.getTotalSearchCount()); } public void testToXContent() throws IOException { - SearchUsageStats searchUsageStats = new SearchUsageStats(Map.of("term", 1L), Map.of("query", 10L), 10L); + SearchUsageStats searchUsageStats = new SearchUsageStats(Map.of("term", 1L), Map.of("query", 2L), Map.of("query", 10L), 10L); assertEquals( - "{\"search\":{\"total\":10,\"queries\":{\"term\":1},\"sections\":{\"query\":10}}}", + "{\"search\":{\"total\":10,\"queries\":{\"term\":1},\"rescorers\":{\"query\":2},\"sections\":{\"query\":10}}}", Strings.toString(searchUsageStats) ); } + + /** + * Test (de)serialization on all previous released versions + */ + public void testSerializationBWC() throws IOException { + for (TransportVersion version : TransportVersionUtils.allReleasedVersions()) { + SearchUsageStats testInstance = new SearchUsageStats( + randomQueryUsage(QUERY_TYPES.size()), + Map.of(), + randomSectionsUsage(SECTIONS.size()), + randomLongBetween(0, Long.MAX_VALUE) + ); + assertSerialization(testInstance, version); + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index b7979c70d0d52..0ade522ae1ffa 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java 
@@ -29,6 +29,7 @@ import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.usage.SearchUsage; import org.elasticsearch.xcontent.NamedObjectNotFoundException; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -42,11 +43,13 @@ import org.junit.BeforeClass; import java.io.IOException; +import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; public class QueryRescorerBuilderTests extends ESTestCase { @@ -112,13 +115,15 @@ public void testFromXContent() throws IOException { } rescoreBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); + SearchUsage searchUsage = new SearchUsage(); try (XContentParser parser = createParser(shuffled)) { parser.nextToken(); - RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); + RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage); assertNotSame(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + assertEquals(searchUsage.getRescorerUsage(), Set.of("query")); } } } @@ -243,6 +248,7 @@ public MappedFieldType getFieldType(String name) { * test parsing exceptions for incorrect rescorer syntax */ public void testUnknownFieldsExpection() throws IOException { + SearchUsage searchUsage = new SearchUsage(); String rescoreElement = """ { @@ -251,8 +257,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = 
createParser(rescoreElement)) { - Exception e = expectThrows(NamedObjectNotFoundException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + NamedObjectNotFoundException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("[3:27] unknown field [bad_rescorer_name]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ { @@ -260,8 +270,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + ParsingException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ @@ -271,14 +285,22 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + ParsingException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = "{ }"; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + ParsingException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("missing rescore type", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = 
""" @@ -288,8 +310,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - XContentParseException e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); + XContentParseException e = expectThrows( + XContentParseException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("[3:17] [query] unknown field [bad_fieldname]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ @@ -299,8 +325,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + XContentParseException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertThat(e.getMessage(), containsString("[query] failed to parse field [rescore_query]")); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ @@ -310,7 +340,7 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - RescorerBuilder.parseFromXContent(parser); + RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage); } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index c782f25fdad4c..d88adea7aaef3 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -583,6 +583,7 @@ public void testToXContent() throws IOException { "search" : { "total" : 0, "queries" : {}, + "rescorers" : {}, "sections" : {} }, "dense_vector": { From a4cdaf390cb0ef4dcfb710314d51826b44a8ee87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 1 Dec 2023 18:30:10 +0100 Subject: [PATCH 096/181] [LTR] Missing param handling improvements (#102850) --- .../trainedmodel/LearnToRankConfig.java | 56 ++++- .../ltr/QueryExtractorBuilder.java | 31 ++- .../datafeed/DatafeedConfigBuilderTests.java | 4 +- .../core/ml/datafeed/DatafeedConfigTests.java | 4 +- .../core/ml/datafeed/DatafeedUpdateTests.java | 6 +- .../trainedmodel/LearnToRankConfigTests.java | 58 +++++- .../ltr/QueryExtractorBuilderTests.java | 21 +- .../core/ml/utils/QueryProviderTests.java | 4 +- .../ml/inference/ltr/LearnToRankService.java | 32 ++- .../ltr/LearnToRankServiceTests.java | 194 ++++++++++-------- 10 files changed, 281 insertions(+), 129 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java index 89dcf746d7927..ba617ca8d04b8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java @@ -18,11 +18,14 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import 
org.elasticsearch.xpack.core.ml.utils.NamedXContentObjectHelper; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -33,7 +36,9 @@ public class LearnToRankConfig extends RegressionConfig implements Rewriteable LENIENT_PARSER = createParser(true); private static final ObjectParser STRICT_PARSER = createParser(false); @@ -51,6 +56,7 @@ private static ObjectParser createParser(boo b -> {}, FEATURE_EXTRACTORS ); + parser.declareObject(Builder::setParamsDefaults, (p, c) -> p.map(), DEFAULT_PARAMS); return parser; } @@ -67,8 +73,13 @@ public static Builder builder(LearnToRankConfig config) { } private final List featureExtractorBuilders; + private final Map paramsDefaults; - public LearnToRankConfig(Integer numTopFeatureImportanceValues, List featureExtractorBuilders) { + public LearnToRankConfig( + Integer numTopFeatureImportanceValues, + List featureExtractorBuilders, + Map paramsDefaults + ) { super(DEFAULT_RESULTS_FIELD, numTopFeatureImportanceValues); if (featureExtractorBuilders != null) { Set featureNames = featureExtractorBuilders.stream() @@ -80,23 +91,40 @@ public LearnToRankConfig(Integer numTopFeatureImportanceValues, List getFeatureExtractorBuilders() { return featureExtractorBuilders; } + public List getQueryFeatureExtractorBuilders() { + List queryExtractorBuilders = new ArrayList<>(); + for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : featureExtractorBuilders) { + if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { + queryExtractorBuilders.add(queryExtractorBuilder); + } + } + + return queryExtractorBuilders; + } + @Override public String getResultsField() { return DEFAULT_RESULTS_FIELD; } + public Map getParamsDefaults() { + return paramsDefaults; + } + @Override public boolean isAllocateOnly() { return false; @@ -126,6 +154,7 @@ public 
String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteableCollection(featureExtractorBuilders); + out.writeGenericMap(paramsDefaults); } @Override @@ -146,6 +175,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws featureExtractorBuilders ); } + + if (paramsDefaults.isEmpty() == false) { + builder.field(DEFAULT_PARAMS.getPreferredName(), paramsDefaults); + } + builder.endObject(); return builder; } @@ -156,12 +190,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; LearnToRankConfig that = (LearnToRankConfig) o; - return Objects.equals(featureExtractorBuilders, that.featureExtractorBuilders); + return Objects.equals(featureExtractorBuilders, that.featureExtractorBuilders) + && Objects.equals(paramsDefaults, that.paramsDefaults); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), featureExtractorBuilders); + return Objects.hash(super.hashCode(), featureExtractorBuilders, paramsDefaults); } @Override @@ -197,7 +232,7 @@ public LearnToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { rewritten |= (rewrittenExtractor != extractorBuilder); } if (rewritten) { - return new LearnToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors); + return new LearnToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors, paramsDefaults); } return this; } @@ -205,12 +240,14 @@ public LearnToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { public static class Builder { private Integer numTopFeatureImportanceValues; private List learnToRankFeatureExtractorBuilders; + private Map paramsDefaults = Map.of(); Builder() {} Builder(LearnToRankConfig config) { this.numTopFeatureImportanceValues = config.getNumTopFeatureImportanceValues(); this.learnToRankFeatureExtractorBuilders = config.featureExtractorBuilders; + 
this.paramsDefaults = config.getParamsDefaults(); } public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { @@ -225,8 +262,13 @@ public Builder setLearnToRankFeatureExtractorBuilders( return this; } + public Builder setParamsDefaults(Map paramsDefaults) { + this.paramsDefaults = paramsDefaults; + return this; + } + public LearnToRankConfig build() { - return new LearnToRankConfig(numTopFeatureImportanceValues, learnToRankFeatureExtractorBuilders); + return new LearnToRankConfig(numTopFeatureImportanceValues, learnToRankFeatureExtractorBuilders, paramsDefaults); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java index f2839148d6a60..a138fbbb98ba1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java @@ -18,35 +18,44 @@ import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import java.io.IOException; +import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.INFERENCE_CONFIG_QUERY_BAD_FORMAT; import static org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper.requireNonNull; -public record QueryExtractorBuilder(String featureName, QueryProvider query) implements LearnToRankFeatureExtractorBuilder { +public record QueryExtractorBuilder(String featureName, QueryProvider query, float defaultScore) + implements + LearnToRankFeatureExtractorBuilder { public static final ParseField NAME = new ParseField("query_extractor"); 
public static final ParseField FEATURE_NAME = new ParseField("feature_name"); public static final ParseField QUERY = new ParseField("query"); + public static final ParseField DEFAULT_SCORE = new ParseField("default_score"); + + public static float DEFAULT_SCORE_DEFAULT = 0f; private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME.getPreferredName(), - a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1]) + a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1], Objects.requireNonNullElse((Float) a[2], DEFAULT_SCORE_DEFAULT)) ); private static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>( NAME.getPreferredName(), true, - a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1]) + a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1], Objects.requireNonNullElse((Float) a[2], DEFAULT_SCORE_DEFAULT)) ); static { PARSER.declareString(constructorArg(), FEATURE_NAME); PARSER.declareObject(constructorArg(), (p, c) -> QueryProvider.fromXContent(p, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT), QUERY); + PARSER.declareFloat(optionalConstructorArg(), DEFAULT_SCORE); LENIENT_PARSER.declareString(constructorArg(), FEATURE_NAME); LENIENT_PARSER.declareObject( constructorArg(), (p, c) -> QueryProvider.fromXContent(p, true, INFERENCE_CONFIG_QUERY_BAD_FORMAT), QUERY ); + LENIENT_PARSER.declareFloat(optionalConstructorArg(), DEFAULT_SCORE); } public static QueryExtractorBuilder fromXContent(XContentParser parser, Object context) { @@ -55,18 +64,29 @@ public static QueryExtractorBuilder fromXContent(XContentParser parser, Object c } public QueryExtractorBuilder(String featureName, QueryProvider query) { + this(featureName, query, DEFAULT_SCORE_DEFAULT); + } + + public QueryExtractorBuilder(String featureName, QueryProvider query, float defaultScore) { this.featureName = requireNonNull(featureName, FEATURE_NAME); this.query = requireNonNull(query, QUERY); + if 
(defaultScore < 0f) { + throw new IllegalArgumentException("[" + NAME + "] requires defaultScore to be positive."); + } + this.defaultScore = defaultScore; } public QueryExtractorBuilder(StreamInput input) throws IOException { - this(input.readString(), QueryProvider.fromStream(input)); + this(input.readString(), QueryProvider.fromStream(input), input.readFloat()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FEATURE_NAME.getPreferredName(), featureName); + if (defaultScore > 0f) { + builder.field(DEFAULT_SCORE.getPreferredName(), defaultScore); + } builder.field(QUERY.getPreferredName(), query.getQuery()); builder.endObject(); return builder; @@ -81,6 +101,7 @@ public String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { out.writeString(featureName); query.writeTo(out); + out.writeFloat(defaultScore); } @Override @@ -106,6 +127,6 @@ public QueryExtractorBuilder rewrite(QueryRewriteContext ctx) throws IOException if (rewritten == query) { return this; } - return new QueryExtractorBuilder(featureName, rewritten); + return new QueryExtractorBuilder(featureName, rewritten, defaultScore); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java index 6a722896970e6..7970a6c3fbc5a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java @@ -30,7 +30,7 @@ import java.util.Map; import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigTests.randomStringList; -import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createRandomValidQueryProvider; +import static 
org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createTestQueryProvider; public class DatafeedConfigBuilderTests extends AbstractWireSerializingTestCase { @@ -44,7 +44,7 @@ public static DatafeedConfig.Builder createRandomizedDatafeedConfigBuilder(Strin } builder.setIndices(randomStringList(1, 10)); if (randomBoolean()) { - builder.setQueryProvider(createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); + builder.setQueryProvider(createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); } boolean addScriptFields = randomBoolean(); if (addScriptFields) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index 0d487f27cd903..c42b540f9461e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -70,7 +70,7 @@ import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigBuilderTests.createRandomizedDatafeedConfigBuilder; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO; -import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createRandomValidQueryProvider; +import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createTestQueryProvider; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -794,7 +794,7 @@ public void testSerializationOfComplexAggs() throws IOException { .subAggregation(bucketScriptPipelineAggregationBuilder); DatafeedConfig.Builder datafeedConfigBuilder = createDatafeedBuilderWithDateHistogram(dateHistogram); 
datafeedConfigBuilder.setQueryProvider( - createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)) + createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)) ); DatafeedConfig datafeedConfig = datafeedConfigBuilder.build(); AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder().addAggregator(dateHistogram); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index f6c859830119b..682fbc81b4592 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -60,7 +60,7 @@ import java.util.Map; import static org.elasticsearch.xpack.core.ml.datafeed.AggProviderTests.createRandomValidAggProvider; -import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createRandomValidQueryProvider; +import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createTestQueryProvider; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; @@ -97,7 +97,7 @@ public static DatafeedUpdate createRandomized(String datafeedId, @Nullable Dataf builder.setIndices(DatafeedConfigTests.randomStringList(1, 10)); } if (randomBoolean()) { - builder.setQuery(createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); + builder.setQuery(createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); } if (randomBoolean()) { int scriptsSize = randomInt(3); @@ -264,7 +264,7 @@ public void testApply_givenFullUpdateNoAggregations() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed"); 
datafeedBuilder.setIndices(Collections.singletonList("i_1")); DatafeedConfig datafeed = datafeedBuilder.build(); - QueryProvider queryProvider = createRandomValidQueryProvider("a", "b"); + QueryProvider queryProvider = createTestQueryProvider("a", "b"); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId()); update.setIndices(Collections.singletonList("i_2")); update.setQueryDelay(TimeValue.timeValueSeconds(42)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java index 16e56b5dc73bd..1059af21ab7eb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.SearchModule; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -28,7 +29,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.function.Predicate; @@ -45,7 +45,8 @@ public static LearnToRankConfig randomLearnToRankConfig() { randomBoolean() ? null : randomIntBetween(0, 10), randomBoolean() ? null - : Stream.generate(QueryExtractorBuilderTests::randomInstance).limit(randomInt(5)).collect(Collectors.toList()) + : Stream.generate(QueryExtractorBuilderTests::randomInstance).limit(randomInt(5)).collect(Collectors.toList()), + randomBoolean() ? 
null : randomMap(0, 10, () -> Tuple.tuple(randomIdentifier(), randomIdentifier())) ); } @@ -61,7 +62,45 @@ protected LearnToRankConfig createTestInstance() { @Override protected LearnToRankConfig mutateInstance(LearnToRankConfig instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 + int i = randomInt(2); + + LearnToRankConfig.Builder builder = LearnToRankConfig.builder(instance); + + switch (i) { + case 0 -> { + builder.setNumTopFeatureImportanceValues( + randomValueOtherThan( + instance.getNumTopFeatureImportanceValues(), + () -> randomBoolean() && instance.getNumTopFeatureImportanceValues() != 0 ? null : randomIntBetween(0, 10) + ) + ); + } + case 1 -> { + builder.setLearnToRankFeatureExtractorBuilders( + randomValueOtherThan( + instance.getFeatureExtractorBuilders(), + () -> randomBoolean() || instance.getFeatureExtractorBuilders().isEmpty() + ? Stream.generate(QueryExtractorBuilderTests::randomInstance) + .limit(randomIntBetween(1, 5)) + .collect(Collectors.toList()) + : null + ) + ); + } + case 2 -> { + builder.setParamsDefaults( + randomValueOtherThan( + instance.getParamsDefaults(), + () -> randomBoolean() || instance.getParamsDefaults().isEmpty() + ? randomMap(1, 10, () -> Tuple.tuple(randomIdentifier(), randomIdentifier())) + : null + ) + ); + } + default -> throw new AssertionError("Unexpected random test case"); + } + + return builder.build(); } @Override @@ -94,10 +133,11 @@ public void testDuplicateFeatureNames() { new TestValueExtractor("foo"), new TestValueExtractor("foo") ); - expectThrows( - IllegalArgumentException.class, - () -> new LearnToRankConfig(randomBoolean() ? 
null : randomIntBetween(0, 10), featureExtractorBuilderList) - ); + + LearnToRankConfig.Builder builder = LearnToRankConfig.builder(randomLearnToRankConfig()) + .setLearnToRankFeatureExtractorBuilders(featureExtractorBuilderList); + + expectThrows(IllegalArgumentException.class, () -> builder.build()); } @Override @@ -105,7 +145,7 @@ protected NamedXContentRegistry xContentRegistry() { List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); - namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); namedXContent.add( new NamedXContentRegistry.Entry( LearnToRankFeatureExtractorBuilder.class, @@ -119,7 +159,7 @@ protected NamedXContentRegistry xContentRegistry() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { List namedWriteables = new ArrayList<>(new MlInferenceNamedXContentProvider().getNamedWriteables()); - namedWriteables.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables()); + namedWriteables.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); namedWriteables.add( new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java index 07103175f927c..23cce17ba9bae 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java @@ -26,7 +26,11 @@ public class QueryExtractorBuilderTests extends AbstractXContentSerializingTestC protected boolean lenient; public static QueryExtractorBuilder randomInstance() { - return new QueryExtractorBuilder(randomAlphaOfLength(10), QueryProviderTests.createRandomValidQueryProvider()); + return new QueryExtractorBuilder( + randomAlphaOfLength(10), + QueryProviderTests.createRandomValidQueryProvider(), + randomFrom(0f, randomFloat()) + ); } @Before @@ -56,10 +60,19 @@ protected QueryExtractorBuilder createTestInstance() { @Override protected QueryExtractorBuilder mutateInstance(QueryExtractorBuilder instance) throws IOException { - int i = randomInt(1); + int i = randomInt(2); return switch (i) { - case 0 -> new QueryExtractorBuilder(randomAlphaOfLength(10), instance.query()); - case 1 -> new QueryExtractorBuilder(instance.featureName(), QueryProviderTests.createRandomValidQueryProvider()); + case 0 -> new QueryExtractorBuilder(randomAlphaOfLength(10), instance.query(), instance.defaultScore()); + case 1 -> new QueryExtractorBuilder( + instance.featureName(), + QueryProviderTests.createRandomValidQueryProvider(), + instance.defaultScore() + ); + case 2 -> new QueryExtractorBuilder( + instance.featureName(), + instance.query(), + randomValueOtherThan(instance.defaultScore(), () -> randomFrom(0f, randomFloat())) + ); default -> throw new AssertionError("unknown random case for instance mutation"); }; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java index dc2f1b7d179f5..a0c2fe93a1a24 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java @@ -65,10 +65,10 @@ protected QueryProvider doParseInstance(XContentParser parser) throws IOExceptio } public static QueryProvider createRandomValidQueryProvider() { - return createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); + return createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); } - public static QueryProvider createRandomValidQueryProvider(String field, String value) { + public static QueryProvider createTestQueryProvider(String field, String value) { Map terms = Collections.singletonMap( BoolQueryBuilder.NAME, Collections.singletonMap( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java index 42f7d8cf0a3b3..2f85000705d8a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java @@ -9,7 +9,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -37,11 +39,14 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import static java.util.Map.entry; +import static org.elasticsearch.common.xcontent.XContentHelper.mergeDefaults; import static 
org.elasticsearch.script.Script.DEFAULT_TEMPLATE_LANG; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.INFERENCE_CONFIG_QUERY_BAD_FORMAT; @@ -101,9 +106,6 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac null, ActionListener.wrap(trainedModelConfig -> { if (trainedModelConfig.getInferenceConfig() instanceof LearnToRankConfig retrievedInferenceConfig) { - for (LearnToRankFeatureExtractorBuilder builder : retrievedInferenceConfig.getFeatureExtractorBuilders()) { - builder.validate(); - } listener.onResponse(applyParams(retrievedInferenceConfig, params)); return; } @@ -129,15 +131,18 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac * * @throws IOException */ - private LearnToRankConfig applyParams(LearnToRankConfig config, Map params) throws IOException { + private LearnToRankConfig applyParams(LearnToRankConfig config, Map params) throws Exception { if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG) == false) { return config; } List featureExtractorBuilders = new ArrayList<>(); + Map mergedParams = new HashMap<>(Objects.requireNonNullElse(params, Map.of())); + mergeDefaults(mergedParams, config.getParamsDefaults()); + for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : config.getFeatureExtractorBuilders()) { - featureExtractorBuilders.add(applyParams(featureExtractorBuilder, params)); + featureExtractorBuilders.add(applyParams(featureExtractorBuilder, mergedParams)); } return LearnToRankConfig.builder(config).setLearnToRankFeatureExtractorBuilders(featureExtractorBuilders).build(); @@ -155,11 +160,13 @@ private LearnToRankConfig applyParams(LearnToRankConfig config, Map params - ) throws IOException { + ) throws Exception { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { - return applyParams(queryExtractorBuilder, params); + featureExtractorBuilder = applyParams(queryExtractorBuilder, 
params); } + featureExtractorBuilder.validate(); + return featureExtractorBuilder; } @@ -186,14 +193,16 @@ private QueryExtractorBuilder applyParams(QueryExtractorBuilder queryExtractorBu return new QueryExtractorBuilder( queryExtractorBuilder.featureName(), - QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT) + QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT), + queryExtractorBuilder.defaultScore() ); } catch (GeneralScriptException e) { if (e.getRootCause().getClass().getName().equals(MustacheInvalidParameterException.class.getName())) { // Can't use instanceof since it return unexpected result. return new QueryExtractorBuilder( queryExtractorBuilder.featureName(), - QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder()) + defaultQuery(queryExtractorBuilder.defaultScore()), + queryExtractorBuilder.defaultScore() ); } throw e; @@ -205,4 +214,9 @@ private String templateSource(QueryProvider queryProvider) throws IOException { return BytesReference.bytes(queryProvider.toXContent(configSourceBuilder, EMPTY_PARAMS)).utf8ToString(); } } + + private QueryProvider defaultQuery(float score) throws IOException { + QueryBuilder query = score == 0 ? 
new MatchNoneQueryBuilder() : new MatchAllQueryBuilder().boost(score); + return QueryProvider.fromParsedQuery(query); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java index a2cd0ff8856c6..cbe91ba874e6d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java @@ -7,11 +7,11 @@ package org.elasticsearch.xpack.ml.inference.ltr; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; @@ -28,17 +28,18 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import org.elasticsearch.xpack.core.ml.utils.QueryProviderTests; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.script.Script.DEFAULT_TEMPLATE_LANG; +import static org.hamcrest.Matchers.hasKey; import static 
org.hamcrest.Matchers.hasSize; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; @@ -51,7 +52,6 @@ public class LearnToRankServiceTests extends ESTestCase { public static final String GOOD_MODEL = "modelId"; public static final String BAD_MODEL = "badModel"; - public static final String TEMPLATED_GOOD_MODEL = "templatedModelId"; public static final TrainedModelConfig GOOD_MODEL_CONFIG = TrainedModelConfig.builder() .setModelId(GOOD_MODEL) .setInput(new TrainedModelInput(List.of("field1", "field2"))) @@ -62,9 +62,10 @@ public class LearnToRankServiceTests extends ESTestCase { new LearnToRankConfig( 2, List.of( - new QueryExtractorBuilder("feature_1", QueryProviderTests.createRandomValidQueryProvider("field_1", "foo")), - new QueryExtractorBuilder("feature_2", QueryProviderTests.createRandomValidQueryProvider("field_2", "bar")) - ) + new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), + new QueryExtractorBuilder("feature_2", QueryProviderTests.createTestQueryProvider("field_2", "bar")) + ), + Map.of() ) ) .build(); @@ -77,113 +78,102 @@ public class LearnToRankServiceTests extends ESTestCase { .setInferenceConfig(new RegressionConfig(null, null)) .build(); - public static final TrainedModelConfig TEMPLATED_GOOD_MODEL_CONFIG = new TrainedModelConfig.Builder(GOOD_MODEL_CONFIG).setModelId( - TEMPLATED_GOOD_MODEL - ) - .setInferenceConfig( - new LearnToRankConfig( - 2, - List.of( - new QueryExtractorBuilder("feature_1", QueryProviderTests.createRandomValidQueryProvider("field_1", "{{foo_param}}")), - new QueryExtractorBuilder("feature_2", QueryProviderTests.createRandomValidQueryProvider("field_2", "{{bar_param}}")) - ) - ) - ) - .build(); - @SuppressWarnings("unchecked") public void testLoadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - 
mockScriptService(), - xContentRegistry() - ); + LearnToRankService learnToRankService = getTestLearnToRankService(); ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(GOOD_MODEL, Collections.emptyMap(), listener); + learnToRankService.loadLearnToRankConfig(GOOD_MODEL, Map.of(), listener); verify(listener).onResponse(eq((LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig())); } @SuppressWarnings("unchecked") public void testLoadMissingLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - mockScriptService(), - xContentRegistry() - ); + LearnToRankService learnToRankService = getTestLearnToRankService(); ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("non-existing-model", Collections.emptyMap(), listener); + learnToRankService.loadLearnToRankConfig("non-existing-model", Map.of(), listener); verify(listener).onFailure(isA(ResourceNotFoundException.class)); } @SuppressWarnings("unchecked") public void testLoadBadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - mockScriptService(), - xContentRegistry() - ); + LearnToRankService learnToRankService = getTestLearnToRankService(); ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(BAD_MODEL, Collections.emptyMap(), listener); + learnToRankService.loadLearnToRankConfig(BAD_MODEL, Map.of(), listener); verify(listener).onFailure(isA(ElasticsearchStatusException.class)); } @SuppressWarnings("unchecked") public void testLoadLearnToRankConfigWithTemplate() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - mockScriptService(), - xContentRegistry() + 
LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + 0, + List.of(new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "{{foo_param}}"))), + Map.of() ); - // When no parameters are provided we expect query to be rewritten into a match_none query. - { - ActionListener listener = mock(ActionListener.class); - SetOnce retrievedConfig = new SetOnce<>(); + LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); + ActionListener listener = mock(ActionListener.class); + + learnToRankService.loadLearnToRankConfig("model-id", Map.ofEntries(Map.entry("foo_param", "foo")), listener); + verify(listener).onResponse(argThat(retrievedConfig -> { + assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(1)); + QueryExtractorBuilder queryExtractorBuilder = retrievedConfig.getQueryFeatureExtractorBuilders().get(0); + assertEquals(queryExtractorBuilder.featureName(), "feature_1"); + assertEquals(queryExtractorBuilder.query(), QueryProviderTests.createTestQueryProvider("field_1", "foo")); + return true; + })); + } + + @SuppressWarnings("unchecked") + public void testLoadLearnToRankConfigWithMissingTemplateParams() throws Exception { + LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + 0, + List.of( + new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), + new QueryExtractorBuilder("feature_2", QueryProviderTests.createTestQueryProvider("field_1", "{{foo_param}}")), + new QueryExtractorBuilder("feature_3", QueryProviderTests.createTestQueryProvider("field_1", "{{bar_param}}"), 1.5f), + new QueryExtractorBuilder("feature_4", QueryProviderTests.createTestQueryProvider("field_1", "{{baz_param}}")) + ), + Map.of("baz_param", "default_value") + ); + + LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); + ActionListener listener = mock(ActionListener.class); + + 
learnToRankService.loadLearnToRankConfig("model-id", randomBoolean() ? null : Map.of(), listener); + verify(listener).onResponse(argThat(retrievedConfig -> { + // Check all features are present. + assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(4)); + + Map queryExtractorBuilders = retrievedConfig.getQueryFeatureExtractorBuilders() + .stream() + .collect(Collectors.toMap(QueryExtractorBuilder::featureName, Function.identity())); + + // feature_1 will be extracted using the provided query since no params are missing for it + assertThat(queryExtractorBuilders, hasKey("feature_1")); + assertEquals(queryExtractorBuilders.get("feature_1").query(), QueryProviderTests.createTestQueryProvider("field_1", "foo")); - doAnswer(i -> { - retrievedConfig.set(i.getArgument(0, LearnToRankConfig.class)); - return null; - }).when(listener).onResponse(any()); - learnToRankService.loadLearnToRankConfig(TEMPLATED_GOOD_MODEL, null, listener); + // feature_2 will be extracted using a match_none query because {{foo_params}} is missing + assertThat(queryExtractorBuilders, hasKey("feature_2")); + assertEquals(queryExtractorBuilders.get("feature_2").query().getParsedQuery(), new MatchNoneQueryBuilder()); - assertNotNull(retrievedConfig.get()); - assertThat(retrievedConfig.get().getFeatureExtractorBuilders(), hasSize(2)); + // feature_3 will be extracted using a match_all query with a boost because: + // - {{bar_param}} is missing + // - a default_score is provided for the query extractor + assertThat(queryExtractorBuilders, hasKey("feature_3")); + assertEquals(queryExtractorBuilders.get("feature_3").query().getParsedQuery(), new MatchAllQueryBuilder().boost(1.5f)); + // feature_4 will be extracted using the default value for the {{baz_param}} + assertThat(queryExtractorBuilders, hasKey("feature_4")); assertEquals( - retrievedConfig.get(), - LearnToRankConfig.builder((LearnToRankConfig) TEMPLATED_GOOD_MODEL_CONFIG.getInferenceConfig()) - 
.setLearnToRankFeatureExtractorBuilders( - List.of( - new QueryExtractorBuilder("feature_1", QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder())), - new QueryExtractorBuilder("feature_2", QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder())) - ) - ) - .build() - ); - } - - // Now testing when providing all the params of the template. - { - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig( - TEMPLATED_GOOD_MODEL, - Map.ofEntries(Map.entry("foo_param", "foo"), Map.entry("bar_param", "bar")), - listener + queryExtractorBuilders.get("feature_4").query(), + QueryProviderTests.createTestQueryProvider("field_1", "default_value") ); - verify(listener).onResponse(argThat(retrievedConfig -> { - assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(2)); - assertEquals(retrievedConfig, GOOD_MODEL_CONFIG.getInferenceConfig()); - return true; - })); - } + return true; + })); } @Override @@ -191,7 +181,7 @@ protected NamedXContentRegistry xContentRegistry() { List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); - namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); return new NamedXContentRegistry(namedXContent); } @@ -208,7 +198,6 @@ private TrainedModelProvider mockTrainedModelProvider() { ActionListener l = invocation.getArgument(3, ActionListener.class); switch (modelId) { case GOOD_MODEL -> l.onResponse(GOOD_MODEL_CONFIG); - case TEMPLATED_GOOD_MODEL -> l.onResponse(TEMPLATED_GOOD_MODEL_CONFIG); case BAD_MODEL -> l.onResponse(BAD_MODEL_CONFIG); default -> l.onFailure(new ResourceNotFoundException("missing model")); } @@ -219,7 +208,40 @@ private TrainedModelProvider mockTrainedModelProvider() { return 
trainedModelProvider; } - private ScriptService mockScriptService() { + private LearnToRankService getTestLearnToRankService() { + return getTestLearnToRankService(mockTrainedModelProvider()); + } + + @SuppressWarnings("unchecked") + private LearnToRankService getTestLearnToRankService(LearnToRankConfig learnToRankConfig) { + TrainedModelProvider trainedModelProvider = mock(TrainedModelProvider.class); + + doAnswer(invocation -> { + String modelId = invocation.getArgument(0); + ActionListener l = invocation.getArgument(3, ActionListener.class); + + l.onResponse( + TrainedModelConfig.builder() + .setModelId(modelId) + .setInput(new TrainedModelInput(List.of("field1", "field2"))) + .setEstimatedOperations(1) + .setModelSize(2) + .setModelType(TrainedModelType.TREE_ENSEMBLE) + .setInferenceConfig(learnToRankConfig) + .build() + ); + return null; + + }).when(trainedModelProvider).getTrainedModel(any(), any(), any(), any()); + + return getTestLearnToRankService(trainedModelProvider); + } + + private LearnToRankService getTestLearnToRankService(TrainedModelProvider trainedModelProvider) { + return new LearnToRankService(mockModelLoadingService(), trainedModelProvider, getTestScriptService(), xContentRegistry()); + } + + private ScriptService getTestScriptService() { ScriptEngine scriptEngine = new MustacheScriptEngine(); return new ScriptService(Settings.EMPTY, Map.of(DEFAULT_TEMPLATE_LANG, scriptEngine), ScriptModule.CORE_CONTEXTS, () -> 1L); } From 3dcef66df5e5b2a1c1e56639a2f09181ce7913af Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 1 Dec 2023 18:59:31 +0100 Subject: [PATCH 097/181] Fix #102863 (#102875) There were two issues: * We mixed x and y when generating estimated errors * We did not deal with values near zero --- .../ql/util/SpatialCoordinateTypesTests.java | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java 
b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java index 67e72d530e2e0..6909475c04521 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java @@ -22,12 +22,19 @@ public class SpatialCoordinateTypesTests extends ESTestCase { private static final Map types = new LinkedHashMap<>(); static { types.put(SpatialCoordinateTypes.GEO, new TestTypeFunctions(ESTestCase::randomGeoPoint, v -> 1e-5)); - types.put(SpatialCoordinateTypes.CARTESIAN, new TestTypeFunctions(ESTestCase::randomCartesianPoint, v -> Math.abs(v / 1e5))); + types.put( + SpatialCoordinateTypes.CARTESIAN, + new TestTypeFunctions(ESTestCase::randomCartesianPoint, SpatialCoordinateTypesTests::cartesianError) + ); + } + + private static double cartesianError(double v) { + double abs = Math.abs(v); + return (abs < 1) ? 1e-5 : abs / 1e7; } record TestTypeFunctions(Supplier randomPoint, Function error) {} - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102863") public void testEncoding() { for (var type : types.entrySet()) { for (int i = 0; i < 10; i++) { @@ -35,8 +42,8 @@ public void testEncoding() { SpatialPoint original = type.getValue().randomPoint().get(); var error = type.getValue().error; SpatialPoint point = coordType.longAsPoint(coordType.pointAsLong(original)); - assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(original.getY(), error.apply(original.getX()))); - assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(original.getX(), error.apply(original.getY()))); + assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(original.getY(), error.apply(original.getY()))); + assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(original.getX(), error.apply(original.getX()))); } } } From 613e3b69360f05b557fc805319466623449707c1 Mon Sep 17 00:00:00 2001 
From: Jedr Blaszyk Date: Fri, 1 Dec 2023 19:18:35 +0100 Subject: [PATCH 098/181] [Connector API] Implement Check in action (#102847) --- .../rest-api-spec/api/connector.check_in.json | 32 ++++ .../test/entsearch/333_connector_check_in.yml | 41 +++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 38 ++++- .../connector/ConnectorIndexService.java | 31 ++++ .../connector/ConnectorSyncInfo.java | 20 --- .../RestUpdateConnectorLastSeenAction.java | 41 +++++ ...ransportUpdateConnectorLastSeenAction.java | 55 +++++++ .../action/UpdateConnectorLastSeenAction.java | 147 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 50 ++++++ .../connector/ConnectorTestUtils.java | 2 +- ...tSeenActionRequestBWCSerializingTests.java | 41 +++++ ...SeenActionResponseBWCSerializingTests.java | 42 +++++ .../xpack/security/operator/Constants.java | 3 +- 14 files changed, 519 insertions(+), 29 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json new file mode 100644 index 0000000000000..a9db92aa450e0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json @@ -0,0 +1,32 @@ +{ + "connector.check_in": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the last_seen timestamp in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_check_in", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml new file mode 100644 index 0000000000000..042fea7091f43 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml @@ -0,0 +1,41 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector +--- +"Connector Check-in": + - do: + connector.check_in: + connector_id: test-connector + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - exists: last_seen + +--- +"Connector Check-in Error - Connector doesn't exist": + - do: + catch: "missing" + 
connector.check_in: + connector_id: test-non-existent-connector + +--- +"Connector Check-in Error - connector_id is empty": + - do: + catch: "bad_request" + connector.check_in: + connector_id: "" diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 3402c3a8b9d7b..29758c3c334cc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; @@ -58,9 +59,11 @@ import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; @@ -196,6 +199,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), + new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), @@ -260,6 +264,7 @@ public List getRestHandlers( new RestListConnectorAction(), new RestPutConnectorAction(), new RestUpdateConnectorFilteringAction(), + new RestUpdateConnectorLastSeenAction(), new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index d4aab30ba89bf..f824009196648 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -51,6 +51,7 @@ *
  • The name of the Elasticsearch index where the synchronized data is stored or managed.
  • *
  • A boolean flag 'isNative' indicating whether the connector is a native Elasticsearch connector.
  • *
  • The language associated with the connector.
  • + *
  • The timestamp when the connector was last active or seen.
  • *
  • A {@link ConnectorSyncInfo} object containing synchronization state and history information.
  • *
  • The name of the connector.
  • *
  • A {@link ConnectorIngestPipeline} object specifying the data ingestion pipeline configuration.
  • @@ -87,6 +88,8 @@ public class Connector implements NamedWriteable, ToXContentObject { @Nullable private final String language; @Nullable + private final Instant lastSeen; + @Nullable private final ConnectorSyncInfo syncInfo; @Nullable private final String name; @@ -115,6 +118,7 @@ public class Connector implements NamedWriteable, ToXContentObject { * @param indexName Name of the index associated with the connector. * @param isNative Flag indicating whether the connector is a native type. * @param language The language supported by the connector. + * @param lastSeen The timestamp when the connector was last active or seen. * @param syncInfo Information about the synchronization state of the connector. * @param name Name of the connector. * @param pipeline Ingest pipeline configuration. @@ -136,6 +140,7 @@ private Connector( String indexName, boolean isNative, String language, + Instant lastSeen, ConnectorSyncInfo syncInfo, String name, ConnectorIngestPipeline pipeline, @@ -156,6 +161,7 @@ private Connector( this.indexName = indexName; this.isNative = isNative; this.language = language; + this.lastSeen = lastSeen; this.syncInfo = syncInfo; this.name = name; this.pipeline = pipeline; @@ -178,6 +184,7 @@ public Connector(StreamInput in) throws IOException { this.indexName = in.readOptionalString(); this.isNative = in.readBoolean(); this.language = in.readOptionalString(); + this.lastSeen = in.readOptionalInstant(); this.syncInfo = in.readOptionalWriteable(ConnectorSyncInfo::new); this.name = in.readOptionalString(); this.pipeline = in.readOptionalWriteable(ConnectorIngestPipeline::new); @@ -199,6 +206,7 @@ public Connector(StreamInput in) throws IOException { public static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); public static final ParseField LANGUAGE_FIELD = new ParseField("language"); + public static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); 
static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); public static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); @@ -221,6 +229,7 @@ public Connector(StreamInput in) throws IOException { .setIndexName((String) args[i++]) .setIsNative((Boolean) args[i++]) .setLanguage((String) args[i++]) + .setLastSeen((Instant) args[i++]) .setSyncInfo( new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) .setLastAccessControlSyncScheduledAt((Instant) args[i++]) @@ -228,7 +237,6 @@ public Connector(StreamInput in) throws IOException { .setLastDeletedDocumentCount((Long) args[i++]) .setLastIncrementalSyncScheduledAt((Instant) args[i++]) .setLastIndexedDocumentCount((Long) args[i++]) - .setLastSeen((Instant) args[i++]) .setLastSyncError((String) args[i++]) .setLastSyncScheduledAt((Instant) args[i++]) .setLastSyncStatus((ConnectorSyncStatus) args[i++]) @@ -272,6 +280,12 @@ public Connector(StreamInput in) throws IOException { PARSER.declareString(optionalConstructorArg(), INDEX_NAME_FIELD); PARSER.declareBoolean(optionalConstructorArg(), IS_NATIVE_FIELD); PARSER.declareString(optionalConstructorArg(), LANGUAGE_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + Connector.LAST_SEEN_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); PARSER.declareField( @@ -294,12 +308,6 @@ public Connector(StreamInput in) throws IOException { ObjectParser.ValueType.STRING ); PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); - PARSER.declareField( - optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), - ConnectorSyncInfo.LAST_SEEN_FIELD, - ObjectParser.ValueType.STRING - ); PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); PARSER.declareField( optionalConstructorArg(), @@ -394,6 +402,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (language != null) { builder.field(LANGUAGE_FIELD.getPreferredName(), language); } + builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); if (syncInfo != null) { syncInfo.toXContent(builder, params); } @@ -433,6 +442,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(indexName); out.writeBoolean(isNative); out.writeOptionalString(language); + out.writeOptionalInstant(lastSeen); out.writeOptionalWriteable(syncInfo); out.writeOptionalString(name); out.writeOptionalWriteable(pipeline); @@ -475,6 +485,10 @@ public Map getConfiguration() { return configuration; } + public Instant getLastSeen() { + return lastSeen; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -492,6 +506,7 @@ public boolean equals(Object o) { && Objects.equals(filtering, connector.filtering) && Objects.equals(indexName, connector.indexName) && Objects.equals(language, connector.language) + && Objects.equals(lastSeen, connector.lastSeen) && Objects.equals(syncInfo, connector.syncInfo) && Objects.equals(name, connector.name) && Objects.equals(pipeline, connector.pipeline) @@ -515,6 +530,7 @@ public int 
hashCode() { indexName, isNative, language, + lastSeen, syncInfo, name, pipeline, @@ -544,6 +560,8 @@ public static class Builder { private String indexName; private boolean isNative = false; private String language; + + private Instant lastSeen; private ConnectorSyncInfo syncInfo = new ConnectorSyncInfo.Builder().build(); private String name; private ConnectorIngestPipeline pipeline; @@ -611,6 +629,11 @@ public Builder setLanguage(String language) { return this; } + public Builder setLastSeen(Instant lastSeen) { + this.lastSeen = lastSeen; + return this; + } + public Builder setSyncInfo(ConnectorSyncInfo syncInfo) { this.syncInfo = syncInfo; return this; @@ -664,6 +687,7 @@ public Connector build() { indexName, isNative, language, + lastSeen, syncInfo, name, pipeline, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 749e8c2e9dd87..9730a0217b942 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -32,6 +32,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; @@ -261,6 +262,36 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } } + /** + * Updates the lastSeen property of a {@link Connector}. 
+ * + * @param request The request for updating the connector's lastSeen status. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java index 10a2d54e29300..8f2002efff5b6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java @@ -33,8 +33,6 @@ public class ConnectorSyncInfo implements Writeable, ToXContentFragment { @Nullable private final Long lastIndexedDocumentCount; @Nullable - private final Instant lastSeen; - @Nullable 
private final String lastSyncError; @Nullable private final Instant lastSyncScheduledAt; @@ -50,7 +48,6 @@ public class ConnectorSyncInfo implements Writeable, ToXContentFragment { * @param lastDeletedDocumentCount The count of documents last deleted during sync. * @param lastIncrementalSyncScheduledAt The timestamp when the last incremental sync was scheduled. * @param lastIndexedDocumentCount The count of documents last indexed during sync. - * @param lastSeen The timestamp when the connector was last active or seen. * @param lastSyncError The last error message encountered during sync, if any. * @param lastSyncScheduledAt The timestamp when the last sync was scheduled. * @param lastSyncStatus The status of the last sync. @@ -63,7 +60,6 @@ private ConnectorSyncInfo( Long lastDeletedDocumentCount, Instant lastIncrementalSyncScheduledAt, Long lastIndexedDocumentCount, - Instant lastSeen, String lastSyncError, Instant lastSyncScheduledAt, ConnectorSyncStatus lastSyncStatus, @@ -75,7 +71,6 @@ private ConnectorSyncInfo( this.lastDeletedDocumentCount = lastDeletedDocumentCount; this.lastIncrementalSyncScheduledAt = lastIncrementalSyncScheduledAt; this.lastIndexedDocumentCount = lastIndexedDocumentCount; - this.lastSeen = lastSeen; this.lastSyncError = lastSyncError; this.lastSyncScheduledAt = lastSyncScheduledAt; this.lastSyncStatus = lastSyncStatus; @@ -89,7 +84,6 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { this.lastDeletedDocumentCount = in.readOptionalLong(); this.lastIncrementalSyncScheduledAt = in.readOptionalInstant(); this.lastIndexedDocumentCount = in.readOptionalLong(); - this.lastSeen = in.readOptionalInstant(); this.lastSyncError = in.readOptionalString(); this.lastSyncScheduledAt = in.readOptionalInstant(); this.lastSyncStatus = in.readOptionalEnum(ConnectorSyncStatus.class); @@ -102,7 +96,6 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new 
ParseField("last_deleted_document_count"); static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); - static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); @@ -129,9 +122,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (lastIndexedDocumentCount != null) { builder.field(LAST_INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastIndexedDocumentCount); } - if (lastSeen != null) { - builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); - } if (lastSyncError != null) { builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); } @@ -156,7 +146,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalLong(lastDeletedDocumentCount); out.writeOptionalInstant(lastIncrementalSyncScheduledAt); out.writeOptionalLong(lastIndexedDocumentCount); - out.writeOptionalInstant(lastSeen); out.writeOptionalString(lastSyncError); out.writeOptionalInstant(lastSyncScheduledAt); out.writeOptionalEnum(lastSyncStatus); @@ -174,7 +163,6 @@ public boolean equals(Object o) { && Objects.equals(lastDeletedDocumentCount, that.lastDeletedDocumentCount) && Objects.equals(lastIncrementalSyncScheduledAt, that.lastIncrementalSyncScheduledAt) && Objects.equals(lastIndexedDocumentCount, that.lastIndexedDocumentCount) - && Objects.equals(lastSeen, that.lastSeen) && Objects.equals(lastSyncError, that.lastSyncError) && Objects.equals(lastSyncScheduledAt, that.lastSyncScheduledAt) && lastSyncStatus == that.lastSyncStatus @@ -190,7 +178,6 @@ public int hashCode() { 
lastDeletedDocumentCount, lastIncrementalSyncScheduledAt, lastIndexedDocumentCount, - lastSeen, lastSyncError, lastSyncScheduledAt, lastSyncStatus, @@ -206,7 +193,6 @@ public static class Builder { private Long lastDeletedDocumentCount; private Instant lastIncrementalSyncScheduledAt; private Long lastIndexedDocumentCount; - private Instant lastSeen; private String lastSyncError; private Instant lastSyncScheduledAt; private ConnectorSyncStatus lastSyncStatus; @@ -242,11 +228,6 @@ public Builder setLastIndexedDocumentCount(Long lastIndexedDocumentCount) { return this; } - public Builder setLastSeen(Instant lastSeen) { - this.lastSeen = lastSeen; - return this; - } - public Builder setLastSyncError(String lastSyncError) { this.lastSyncError = lastSyncError; return this; @@ -275,7 +256,6 @@ public ConnectorSyncInfo build() { lastDeletedDocumentCount, lastIncrementalSyncScheduledAt, lastIndexedDocumentCount, - lastSeen, lastSyncError, lastSyncScheduledAt, lastSyncStatus, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java new file mode 100644 index 0000000000000..b2ebaa74984b1 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorLastSeenAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_last_seen_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_check_in")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorLastSeenAction.Request request = new UpdateConnectorLastSeenAction.Request(restRequest.param("connector_id")); + return channel -> client.execute( + UpdateConnectorLastSeenAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorLastSeenAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java new file mode 100644 index 0000000000000..3d3d2c9ee04b7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorLastSeenAction extends HandledTransportAction< + UpdateConnectorLastSeenAction.Request, + UpdateConnectorLastSeenAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorLastSeenAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorLastSeenAction.NAME, + transportService, + actionFilters, + UpdateConnectorLastSeenAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorLastSeenAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorLastSeen( + request, + listener.map(r -> new UpdateConnectorLastSeenAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java new file mode 100644 index 0000000000000..976be76ba84af --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.time.Instant; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class UpdateConnectorLastSeenAction extends ActionType { + + public static final UpdateConnectorLastSeenAction INSTANCE = new UpdateConnectorLastSeenAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_last_seen"; + + public UpdateConnectorLastSeenAction() { + super(NAME, UpdateConnectorLastSeenAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + private final Instant lastSeen; + + public Request(String connectorId) { + this.connectorId = connectorId; + this.lastSeen = Instant.now(); + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = 
in.readString(); + this.lastSeen = in.readInstant(); + } + + public String getConnectorId() { + return connectorId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + return validationException; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeInstant(lastSeen); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(lastSeen, request.lastSeen); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, lastSeen); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case 
NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 5f32f27b1ec64..c93135942348a 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -108,6 +109,28 @@ public void testUpdateConnectorFiltering() throws Exception { assertThat(filteringList, equalTo(indexedConnector.getFiltering())); } + public void testUpdateConnectorLastSeen() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + DocWriteResponse 
updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnectorTime1 = awaitGetConnector(connector.getConnectorId()); + assertNotNull(indexedConnectorTime1.getLastSeen()); + + checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnectorTime2 = awaitGetConnector(connector.getConnectorId()); + assertNotNull(indexedConnectorTime2.getLastSeen()); + assertTrue(indexedConnectorTime2.getLastSeen().isAfter(indexedConnectorTime1.getLastSeen())); + + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); @@ -232,6 +255,7 @@ private UpdateResponse awaitUpdateConnectorFiltering(UpdateConnectorFilteringAct final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); connectorIndexService.updateConnectorFiltering(updateFiltering, new ActionListener<>() { + @Override public void onResponse(UpdateResponse indexResponse) { resp.set(indexResponse); @@ -244,6 +268,7 @@ public void onFailure(Exception e) { latch.countDown(); } }); + assertTrue("Timeout waiting for update filtering request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); if (exc.get() != null) { throw exc.get(); @@ -252,6 +277,31 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorLastSeen(UpdateConnectorLastSeenAction.Request checkIn) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorLastSeen(checkIn, new ActionListener<>() { + 
@Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for check-in request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from check-in request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorPipeline(UpdateConnectorPipelineAction.Request updatePipeline) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index e1752ed6fb354..a0cf018142599 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -73,7 +73,6 @@ public static ConnectorSyncInfo getRandomConnectorSyncInfo() { .setLastDeletedDocumentCount(randomFrom(new Long[] { null, randomLong() })) .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastIndexedDocumentCount(randomFrom(new Long[] { null, randomLong() })) - .setLastSeen(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) @@ -190,6 +189,7 @@ public static Connector getRandomConnector() { 
.setIndexName(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setIsNative(randomBoolean()) .setLanguage(randomFrom(new String[] { null, randomAlphaOfLength(10) })) + .setLastSeen(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setSyncInfo(getRandomConnectorSyncInfo()) .setName(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setPipeline(randomBoolean() ? getRandomConnectorIngestPipeline() : null) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..13e9e546d516b --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSeenActionRequestBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorLastSeenAction.Request> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSeenAction.Request::new; + } + + @Override + protected UpdateConnectorLastSeenAction.Request createTestInstance() { + return new UpdateConnectorLastSeenAction.Request(randomUUID()); + } + + @Override + protected UpdateConnectorLastSeenAction.Request mutateInstance(UpdateConnectorLastSeenAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSeenAction.Request mutateInstanceForVersion( + UpdateConnectorLastSeenAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..d992f1b5f188e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSeenActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorLastSeenAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSeenAction.Response::new; + } + + @Override + protected UpdateConnectorLastSeenAction.Response createTestInstance() { + return new UpdateConnectorLastSeenAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorLastSeenAction.Response mutateInstance(UpdateConnectorLastSeenAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSeenAction.Response mutateInstanceForVersion( + UpdateConnectorLastSeenAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index b9d005e695459..aa6c67798e3e3 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,9 +127,10 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", + 
"cluster:admin/xpack/connector/update_filtering", + "cluster:admin/xpack/connector/update_last_seen", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", - "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/check_in", From 6750eb37f9744296d2b11f0c63958e218a4cb339 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 1 Dec 2023 13:18:51 -0500 Subject: [PATCH 099/181] Better processor stat merge (#102821) --- docs/changelog/102821.yaml | 5 + .../org/elasticsearch/ingest/IngestStats.java | 67 ++++++++--- .../ingest/IngestStatsTests.java | 106 +++++++++++++----- 3 files changed, 136 insertions(+), 42 deletions(-) create mode 100644 docs/changelog/102821.yaml diff --git a/docs/changelog/102821.yaml b/docs/changelog/102821.yaml new file mode 100644 index 0000000000000..dcd6721621878 --- /dev/null +++ b/docs/changelog/102821.yaml @@ -0,0 +1,5 @@ +pr: 102821 +summary: Better processor stat merge +area: Ingest Node +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index e197af5fbb46a..488a498f1640a 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -28,7 +28,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; public record IngestStats(Stats totalStats, List pipelineStats, Map> processorStats) implements @@ -263,20 +262,62 @@ static List merge(List first, List sec */ public record ProcessorStat(String name, String type, Stats stats) { - // The list of ProcessorStats has *always* stats for each processor (even if processor was executed or not), so it's safe to zip - // both lists using a common index iterator. 
private static List merge(List first, List second) { - var merged = new ArrayList(); - assert first.size() == second.size() - : "stats size mismatch [" - + first.stream().map(ps -> ps.name + ":" + ps.type).collect(Collectors.joining(",")) - + "] [" - + second.stream().map(ps -> ps.name + ":" + ps.type).collect(Collectors.joining(",")) - + "]"; - for (var i = 0; i < first.size(); i++) { - merged.add(new ProcessorStat(first.get(i).name, first.get(i).type, Stats.merge(first.get(i).stats, second.get(i).stats))); + // in the simple case, this amounts to summing up the stats in the first and second and returning + // a new list of stats that contains the sum. but there are a few not-quite-so-simple cases, too, + // so this logic is a little bit intricate. + + // total up the stats across both sides + long firstIngestCountTotal = 0; + for (ProcessorStat ps : first) { + firstIngestCountTotal += ps.stats.ingestCount; + } + + long secondIngestCountTotal = 0; + for (ProcessorStat ps : second) { + secondIngestCountTotal += ps.stats.ingestCount; + } + + // early return in the case of a non-ingest node (the sum of the stats will be zero, so just return the other) + if (firstIngestCountTotal == 0) { + return second; + } else if (secondIngestCountTotal == 0) { + return first; + } + + // the list of stats can be different depending on the exact order of application of the cluster states + // that apply a change to a pipeline -- figure out if they match or not (usually they match!!!) 
+ + // speculative execution of the expected, simple case (where we can merge the processor stats) + // if we process both lists of stats and everything matches up, we can return the resulting merged list + if (first.size() == second.size()) { // if the sizes of the lists don't match, then we can skip all this + boolean match = true; + var merged = new ArrayList(first.size()); + for (var i = 0; i < first.size(); i++) { + ProcessorStat ps1 = first.get(i); + ProcessorStat ps2 = second.get(i); + if (ps1.name.equals(ps2.name) == false || ps1.type.equals(ps2.type) == false) { + match = false; + break; + } else { + merged.add(new ProcessorStat(ps1.name, ps1.type, Stats.merge(ps1.stats, ps2.stats))); + } + } + if (match) { + return merged; + } + } + + // speculative execution failed, so we're in the unfortunate case. the lists are different, and they + // can't be meaningfully merged without more information. note that IngestService#innerUpdatePipelines + // resets the counts if there's enough variation on an update, so we'll favor the side with the *lower* + // count as being the 'newest' -- the assumption is that the higher side is just a cluster state + // application away from itself being reset to zero anyway. 
+ if (firstIngestCountTotal < secondIngestCountTotal) { + return first; + } else { + return second; } - return merged; } } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java index c9bf1f97a4e9d..2be2f56677648 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java @@ -71,42 +71,86 @@ public void testPipelineStatsMerge() { ); } - public void testProcessorStatsMerge() { + public void testProcessorStatsMergeZeroCounts() { { - var first = Map.of("pipeline-1", randomPipelineProcessorStats()); + var expected = randomPipelineProcessorStats(); + var first = Map.of("pipeline-1", expected); + + // merging with an empty map yields the non-empty map assertEquals(IngestStats.merge(Map.of(), first), first); assertEquals(IngestStats.merge(first, Map.of()), first); + + // it's the same exact reference, in fact + assertSame(expected, IngestStats.merge(Map.of(), first).get("pipeline-1")); + assertSame(expected, IngestStats.merge(first, Map.of()).get("pipeline-1")); } { - var first = Map.of( - "pipeline-1", - randomPipelineProcessorStats(), - "pipeline-2", - randomPipelineProcessorStats(), - "pipeline-3", - randomPipelineProcessorStats() + var expected = randomPipelineProcessorStats(); + var first = Map.of("pipeline-1", expected); + var zero = List.of( + new IngestStats.ProcessorStat("proc-1", "type-1", zeroStats()), + new IngestStats.ProcessorStat("proc-1", "type-2", zeroStats()), + new IngestStats.ProcessorStat("proc-2", "type-1", zeroStats()), + new IngestStats.ProcessorStat("proc-3", "type-3", zeroStats()) ); - var second = Map.of( + var second = Map.of("pipeline-1", zero); + + // merging with a zero map yields the non-zero map + assertEquals(IngestStats.merge(second, first), first); + assertEquals(IngestStats.merge(first, second), first); + + // it's the same exact reference, in 
fact + assertSame(expected, IngestStats.merge(second, first).get("pipeline-1")); + assertSame(expected, IngestStats.merge(first, second).get("pipeline-1")); + } + } + + public void testProcessorStatsMerge() { + var first = Map.of( + "pipeline-1", + randomPipelineProcessorStats(), + "pipeline-2", + randomPipelineProcessorStats(), + "pipeline-3", + randomPipelineProcessorStats() + ); + var second = Map.of( + "pipeline-2", + randomPipelineProcessorStats(), + "pipeline-3", + randomPipelineProcessorStats(), + "pipeline-1", + randomPipelineProcessorStats() + ); + + assertEquals( + IngestStats.merge(first, second), + Map.of( + "pipeline-1", + expectedPipelineProcessorStats(first.get("pipeline-1"), second.get("pipeline-1")), "pipeline-2", - randomPipelineProcessorStats(), + expectedPipelineProcessorStats(first.get("pipeline-2"), second.get("pipeline-2")), "pipeline-3", - randomPipelineProcessorStats(), - "pipeline-1", - randomPipelineProcessorStats() - ); + expectedPipelineProcessorStats(first.get("pipeline-3"), second.get("pipeline-3")) + ) + ); + } - assertEquals( - IngestStats.merge(first, second), - Map.of( - "pipeline-1", - expectedPipelineProcessorStats(first.get("pipeline-1"), second.get("pipeline-1")), - "pipeline-2", - expectedPipelineProcessorStats(first.get("pipeline-2"), second.get("pipeline-2")), - "pipeline-3", - expectedPipelineProcessorStats(first.get("pipeline-3"), second.get("pipeline-3")) - ) - ); - } + public void testProcessorStatsMergeHeterogeneous() { + // if a pipeline has heterogeneous *non-zero* stats, then we defer to the one with a smaller total ingest count + + var first = Map.of( + "pipeline-1", + List.of( + new IngestStats.ProcessorStat("name-1", "type-1", new IngestStats.Stats(randomLongBetween(1, 100), 0, 0, 0)), + new IngestStats.ProcessorStat("name-2", "type-2", new IngestStats.Stats(randomLongBetween(1, 100), 0, 0, 0)) + ) + ); + var expected = List.of(new IngestStats.ProcessorStat("name-1", "type-1", new IngestStats.Stats(1, 0, 0, 0))); 
+ var second = Map.of("pipeline-1", expected); + + assertEquals(second, IngestStats.merge(first, second)); + assertSame(expected, IngestStats.merge(second, first).get("pipeline-1")); } private static List expectedPipelineProcessorStats( @@ -117,7 +161,7 @@ private static List expectedPipelineProcessorStats( new IngestStats.ProcessorStat("proc-1", "type-1", merge(first.get(0).stats(), second.get(0).stats())), new IngestStats.ProcessorStat("proc-1", "type-2", merge(first.get(1).stats(), second.get(1).stats())), new IngestStats.ProcessorStat("proc-2", "type-1", merge(first.get(2).stats(), second.get(2).stats())), - new IngestStats.ProcessorStat("proc-3", "type-4", merge(first.get(3).stats(), second.get(3).stats())) + new IngestStats.ProcessorStat("proc-3", "type-3", merge(first.get(3).stats(), second.get(3).stats())) ); } @@ -126,7 +170,7 @@ private static List randomPipelineProcessorStats() { randomProcessorStat("proc-1", "type-1"), randomProcessorStat("proc-1", "type-2"), randomProcessorStat("proc-2", "type-1"), - randomProcessorStat("proc-3", "type-4") + randomProcessorStat("proc-3", "type-3") ); } @@ -216,4 +260,8 @@ private static IngestStats.PipelineStat randomPipelineStat(String id) { private static IngestStats.Stats randomStats() { return new IngestStats.Stats(randomLong(), randomLong(), randomLong(), randomLong()); } + + private static IngestStats.Stats zeroStats() { + return new IngestStats.Stats(0, 0, 0, 0); + } } From 8af50e314fe837c46353c12938c102bcab2aab63 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Fri, 1 Dec 2023 13:24:14 -0500 Subject: [PATCH 100/181] Reformatted RestStatus to include a blank line after each enum declaration (#102882) --- .../org/elasticsearch/rest/RestStatus.java | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/rest/RestStatus.java b/server/src/main/java/org/elasticsearch/rest/RestStatus.java index 
101c412e1420e..84f7be32db6e5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestStatus.java +++ b/server/src/main/java/org/elasticsearch/rest/RestStatus.java @@ -19,6 +19,7 @@ import static java.util.Collections.unmodifiableMap; public enum RestStatus { + /** * The client SHOULD continue with its request. This interim response is used to inform the client that the * initial part of the request has been received and has not yet been rejected by the server. The client @@ -26,6 +27,7 @@ public enum RestStatus { * ignore this response. The server MUST send a final response after the request has been completed. */ CONTINUE(100), + /** * The server understands and is willing to comply with the client's request, via the Upgrade message header field * (section 14.42), for a change in the application protocol being used on this connection. The server will @@ -33,6 +35,7 @@ public enum RestStatus { * which terminates the 101 response. */ SWITCHING_PROTOCOLS(101), + /** * The request has succeeded. The information returned with the response is dependent on the method * used in the request, for example: @@ -44,6 +47,7 @@ public enum RestStatus { * */ OK(200), + /** * The request has been fulfilled and resulted in a new resource being created. The newly created resource can * be referenced by the URI(s) returned in the entity of the response, with the most specific URI for the @@ -57,6 +61,7 @@ public enum RestStatus { * for the requested variant just created, see section 14.19. */ CREATED(201), + /** * The request has been accepted for processing, but the processing has not been completed. The request might * or might not eventually be acted upon, as it might be disallowed when processing actually takes place. There @@ -69,6 +74,7 @@ public enum RestStatus { * monitor or some estimate of when the user can expect the request to be fulfilled. 
*/ ACCEPTED(202), + /** * The returned meta information in the entity-header is not the definitive set as available from the origin * server, but is gathered from a local or a third-party copy. The set presented MAY be a subset or super set @@ -77,6 +83,7 @@ public enum RestStatus { * is not required and is only appropriate when the response would otherwise be 200 (OK). */ NON_AUTHORITATIVE_INFORMATION(203), + /** * The server has fulfilled the request but does not need to return an entity-body, and might want to return * updated meta information. The response MAY include new or updated meta information in the form of @@ -91,6 +98,7 @@ public enum RestStatus { * line after the header fields. */ NO_CONTENT(204), + /** * The server has fulfilled the request and the user agent SHOULD reset the document view which caused the * request to be sent. This response is primarily intended to allow input for actions to take place via user @@ -98,6 +106,7 @@ public enum RestStatus { * another input action. The response MUST NOT include an entity. */ RESET_CONTENT(205), + /** * The server has fulfilled the partial GET request for the resource. The request MUST have included a Range * header field (section 14.35) indicating the desired range, and MAY have included an If-Range header @@ -127,6 +136,7 @@ public enum RestStatus { * A cache that does not support the Range and Content-Range headers MUST NOT cache 206 (Partial) responses. */ PARTIAL_CONTENT(206), + /** * The 207 (Multi-Status) status code provides status for multiple independent operations (see Section 13 for * more information). @@ -146,6 +156,7 @@ public enum RestStatus { * to identify the resource. 
*/ MULTI_STATUS(207), + /** * The requested resource corresponds to any one of a set of representations, each with its own specific * location, and agent-driven negotiation information (section 12) is being provided so that the user (or user @@ -162,6 +173,7 @@ public enum RestStatus { * This response is cacheable unless indicated otherwise. */ MULTIPLE_CHOICES(300), + /** * The requested resource has been assigned a new permanent URI and any future references to this resource * SHOULD use one of the returned URIs. Clients with link editing capabilities ought to automatically re-link @@ -176,6 +188,7 @@ public enum RestStatus { * the conditions under which the request was issued. */ MOVED_PERMANENTLY(301), + /** * The requested resource resides temporarily under a different URI. Since the redirection might be altered on * occasion, the client SHOULD continue to use the Request-URI for future requests. This response is only @@ -189,6 +202,7 @@ public enum RestStatus { * the conditions under which the request was issued. */ FOUND(302), + /** * The response to the request can be found under a different URI and SHOULD be retrieved using a GET method on * that resource. This method exists primarily to allow the output of a POST-activated script to redirect the @@ -200,6 +214,7 @@ public enum RestStatus { * HEAD, the entity of the response SHOULD contain a short hypertext note with a hyperlink to the new URI(s). */ SEE_OTHER(303), + /** * If the client has performed a conditional GET request and access is allowed, but the document has not been * modified, the server SHOULD respond with this status code. The 304 response MUST NOT contain a message-body, @@ -228,12 +243,14 @@ public enum RestStatus { * reflect any new field values given in the response. */ NOT_MODIFIED(304), + /** * The requested resource MUST be accessed through the proxy given by the Location field. The Location field * gives the URI of the proxy. 
The recipient is expected to repeat this single request via the proxy. * 305 responses MUST only be generated by origin servers. */ USE_PROXY(305), + /** * The requested resource resides temporarily under a different URI. Since the redirection MAY be altered on * occasion, the client SHOULD continue to use the Request-URI for future requests. This response is only @@ -249,11 +266,13 @@ public enum RestStatus { * conditions under which the request was issued. */ TEMPORARY_REDIRECT(307), + /** * The request could not be understood by the server due to malformed syntax. The client SHOULD NOT repeat the * request without modifications. */ BAD_REQUEST(400), + /** * The request requires user authentication. The response MUST include a WWW-Authenticate header field * (section 14.47) containing a challenge applicable to the requested resource. The client MAY repeat the request @@ -265,10 +284,12 @@ public enum RestStatus { * "HTTP Authentication: Basic and Digest Access Authentication" [43]. */ UNAUTHORIZED(401), + /** * This code is reserved for future use. */ PAYMENT_REQUIRED(402), + /** * The server understood the request, but is refusing to fulfill it. Authorization will not help and the request * SHOULD NOT be repeated. If the request method was not HEAD and the server wishes to make public why the @@ -277,6 +298,7 @@ public enum RestStatus { * instead. */ FORBIDDEN(403), + /** * The server has not found anything matching the Request-URI. No indication is given of whether the condition * is temporary or permanent. The 410 (Gone) status code SHOULD be used if the server knows, through some @@ -285,11 +307,13 @@ public enum RestStatus { * has been refused, or when no other response is applicable. */ NOT_FOUND(404), + /** * The method specified in the Request-Line is not allowed for the resource identified by the Request-URI. * The response MUST include an Allow header containing a list of valid methods for the requested resource. 
*/ METHOD_NOT_ALLOWED(405), + /** * The resource identified by the request is only capable of generating response entities which have content * characteristics not acceptable according to the accept headers sent in the request. @@ -308,6 +332,7 @@ public enum RestStatus { * the user for a decision on further actions. */ NOT_ACCEPTABLE(406), + /** * This code is similar to 401 (Unauthorized), but indicates that the client must first authenticate itself with * the proxy. The proxy MUST return a Proxy-Authenticate header field (section 14.33) containing a challenge @@ -316,11 +341,13 @@ public enum RestStatus { * "HTTP Authentication: Basic and Digest Access Authentication" [43]. */ PROXY_AUTHENTICATION(407), + /** * The client did not produce a request within the time that the server was prepared to wait. The client MAY * repeat the request without modifications at any later time. */ REQUEST_TIMEOUT(408), + /** * The request could not be completed due to a conflict with the current state of the resource. This code is * only allowed in situations where it is expected that the user might be able to resolve the conflict and @@ -335,6 +362,7 @@ public enum RestStatus { * a format defined by the response Content-Type. */ CONFLICT(409), + /** * The requested resource is no longer available at the server and no forwarding address is known. This condition * is expected to be considered permanent. Clients with link editing capabilities SHOULD delete references to @@ -350,11 +378,13 @@ public enum RestStatus { * owner. */ GONE(410), + /** * The server refuses to accept the request without a defined Content-Length. The client MAY repeat the request * if it adds a valid Content-Length header field containing the length of the message-body in the request message. */ LENGTH_REQUIRED(411), + /** * The precondition given in one or more of the request-header fields evaluated to false when it was tested on * the server. 
This response code allows the client to place preconditions on the current resource metainformation @@ -362,6 +392,7 @@ public enum RestStatus { * intended. */ PRECONDITION_FAILED(412), + /** * The server is refusing to process a request because the request entity is larger than the server is willing * or able to process. The server MAY close the connection to prevent the client from continuing the request. @@ -370,6 +401,7 @@ public enum RestStatus { * is temporary and after what time the client MAY try again. */ REQUEST_ENTITY_TOO_LARGE(413), + /** * The server is refusing to service the request because the Request-URI is longer than the server is willing * to interpret. This rare condition is only likely to occur when a client has improperly converted a POST @@ -379,11 +411,13 @@ public enum RestStatus { * buffers for reading or manipulating the Request-URI. */ REQUEST_URI_TOO_LONG(414), + /** * The server is refusing to service the request because the entity of the request is in a format not supported * by the requested resource for the requested method. */ UNSUPPORTED_MEDIA_TYPE(415), + /** * A server SHOULD return a response with this status code if a request included a Range request-header field * (section 14.35), and none of the range-specifier values in this field overlap the current extent of the @@ -396,12 +430,14 @@ public enum RestStatus { * response MUST NOT use the multipart/byteranges content-type. */ REQUESTED_RANGE_NOT_SATISFIED(416), + /** * The expectation given in an Expect request-header field (see section 14.20) could not be met by this server, * or, if the server is a proxy, the server has unambiguous evidence that the request could not be met by the * next-hop server. 
*/ EXPECTATION_FAILED(417), + /** * The 422 (Unprocessable Entity) status code means the server understands the content type of the request * entity (hence a 415(Unsupported Media Type) status code is inappropriate), and the syntax of the request @@ -410,37 +446,44 @@ public enum RestStatus { * well-formed (i.e., syntactically correct), but semantically erroneous, XML instructions. */ UNPROCESSABLE_ENTITY(422), + /** * The 423 (Locked) status code means the source or destination resource of a method is locked. This response * SHOULD contain an appropriate precondition or postcondition code, such as 'lock-token-submitted' or * 'no-conflicting-lock'. */ LOCKED(423), + /** * The 424 (Failed Dependency) status code means that the method could not be performed on the resource because * the requested action depended on another action and that action failed. For example, if a command in a * PROPPATCH method fails, then, at minimum, the rest of the commands will also fail with 424 (Failed Dependency). */ FAILED_DEPENDENCY(424), + /** * 429 Too Many Requests (RFC6585) */ TOO_MANY_REQUESTS(429), + /** * The server encountered an unexpected condition which prevented it from fulfilling the request. */ INTERNAL_SERVER_ERROR(500), + /** * The server does not support the functionality required to fulfill the request. This is the appropriate * response when the server does not recognize the request method and is not capable of supporting it for any * resource. */ NOT_IMPLEMENTED(501), + /** * The server, while acting as a gateway or proxy, received an invalid response from the upstream server it * accessed in attempting to fulfill the request. */ BAD_GATEWAY(502), + /** * The server is currently unable to handle the request due to a temporary overloading or maintenance of the * server. The implication is that this is a temporary condition which will be alleviated after some delay. 
@@ -448,12 +491,14 @@ public enum RestStatus { * the client SHOULD handle the response as it would for a 500 response. */ SERVICE_UNAVAILABLE(503), + /** * The server, while acting as a gateway or proxy, did not receive a timely response from the upstream server * specified by the URI (e.g. HTTP, FTP, LDAP) or some other auxiliary server (e.g. DNS) it needed to access * in attempting to complete the request. */ GATEWAY_TIMEOUT(504), + /** * The server does not support, or refuses to support, the HTTP protocol version that was used in the request * message. The server is indicating that it is unable or unwilling to complete the request using the same major @@ -462,6 +507,7 @@ public enum RestStatus { * that server. */ HTTP_VERSION_NOT_SUPPORTED(505), + /** * The 507 (Insufficient Storage) status code means the method could not be performed on the resource because * the server is unable to store the representation needed to successfully complete the request. This condition From 60b7622de6c854ebe2cad13b8d5d7407f1901853 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 1 Dec 2023 10:29:26 -0800 Subject: [PATCH 101/181] Fix memory tracking in TopN.Row (#102831) This commit addresses the issue of missing memory tracking for the BitSet in TopN.Row. Instead of introducing BreakingBitSet, we replace the BitSet with a smaller array of offsets in this PR. Nik suggested to remove that BitSet, but I haven't looked into that option yet. 
Closes #100640 Closes #102683 Closes #102790 Closes #102784 --- docs/changelog/102831.yaml | 9 +++ .../compute/operator/topn/TopNOperator.java | 80 ++++++++++++++----- .../operator/topn/TopNOperatorTests.java | 5 +- .../compute/operator/topn/TopNRowTests.java | 17 +++- .../esql/qa/single_node/HeapAttackIT.java | 2 - 5 files changed, 84 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/102831.yaml diff --git a/docs/changelog/102831.yaml b/docs/changelog/102831.yaml new file mode 100644 index 0000000000000..fb99b0c7f732b --- /dev/null +++ b/docs/changelog/102831.yaml @@ -0,0 +1,9 @@ +pr: 102831 +summary: Fix memory tracking in TopN.Row +area: ES|QL +type: bug +issues: + - 100640 + - 102784 + - 102790 + - 102683 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 2ebc9c82c6d98..c3fc9fc68b60c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -25,7 +25,6 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.BitSet; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -51,8 +50,7 @@ public class TopNOperator implements Operator, Accountable { * multivalues) to reference each position in each block of the Page. */ static final class Row implements Accountable, Releasable { - private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Row.class) + RamUsageEstimator - .shallowSizeOfInstance(BitSet.class); + private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Row.class); /** * The sort key. 
@@ -64,7 +62,7 @@ static final class Row implements Accountable, Releasable { * For ex, if a Long is represented as 8 bytes, each of these bytes will have the same value (set/unset) if the respective Long * value is used for sorting ascending/descending. */ - final BitSet orderByCompositeKeyAscending = new BitSet(); + final BytesOrder bytesOrder; /** * Values to reconstruct the row. Sort of. When we reconstruct the row we read @@ -73,11 +71,12 @@ static final class Row implements Accountable, Releasable { */ final BreakingBytesRefBuilder values; - Row(CircuitBreaker breaker) { + Row(CircuitBreaker breaker, List sortOrders) { boolean success = false; try { keys = new BreakingBytesRefBuilder(breaker, "topn"); values = new BreakingBytesRefBuilder(breaker, "topn"); + bytesOrder = new BytesOrder(sortOrders, breaker, "topn"); success = true; } finally { if (success == false) { @@ -88,12 +87,54 @@ static final class Row implements Accountable, Releasable { @Override public long ramBytesUsed() { - return SHALLOW_SIZE + keys.ramBytesUsed() + orderByCompositeKeyAscending.size() / Byte.SIZE + values.ramBytesUsed(); + return SHALLOW_SIZE + keys.ramBytesUsed() + bytesOrder.ramBytesUsed() + values.ramBytesUsed(); } @Override public void close() { - Releasables.closeExpectNoException(keys, values); + Releasables.closeExpectNoException(keys, values, bytesOrder); + } + } + + static final class BytesOrder implements Releasable, Accountable { + private static final long BASE_RAM_USAGE = RamUsageEstimator.shallowSizeOfInstance(BytesOrder.class); + private final CircuitBreaker breaker; + final List sortOrders; + final int[] endOffsets; + + BytesOrder(List sortOrders, CircuitBreaker breaker, String label) { + this.breaker = breaker; + this.sortOrders = sortOrders; + breaker.addEstimateBytesAndMaybeBreak(memoryUsed(sortOrders.size()), label); + this.endOffsets = new int[sortOrders.size()]; + } + + /** + * Returns true if the byte at the given position is ordered ascending; otherwise, 
return false + */ + boolean isByteOrderAscending(int bytePosition) { + int index = Arrays.binarySearch(endOffsets, bytePosition); + if (index < 0) { + index = -1 - index; + } + return sortOrders.get(index).asc(); + } + + private long memoryUsed(int numKeys) { + // sortOrders is global and its memory is accounted at the top level TopNOperator + return BASE_RAM_USAGE + RamUsageEstimator.alignObjectSize( + (long) RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * numKeys + ); + } + + @Override + public long ramBytesUsed() { + return memoryUsed(sortOrders.size()); + } + + @Override + public void close() { + breaker.addWithoutBreaking(-ramBytesUsed()); } } @@ -138,14 +179,11 @@ void row(int position, Row destination) { private void writeKey(int position, Row row) { int orderByCompositeKeyCurrentPosition = 0; - for (KeyFactory factory : keyFactories) { - int valueAsBytesSize = factory.extractor.writeKey(row.keys, position); - row.orderByCompositeKeyAscending.set( - orderByCompositeKeyCurrentPosition, - valueAsBytesSize + orderByCompositeKeyCurrentPosition, - factory.ascending - ); + for (int i = 0; i < keyFactories.length; i++) { + int valueAsBytesSize = keyFactories[i].extractor.writeKey(row.keys, position); + assert valueAsBytesSize > 0 : valueAsBytesSize; orderByCompositeKeyCurrentPosition += valueAsBytesSize; + row.bytesOrder.endOffsets[i] = orderByCompositeKeyCurrentPosition - 1; } } @@ -189,9 +227,7 @@ public record TopNOperatorFactory( List sortOrders, int maxPageSize ) implements OperatorFactory { - public TopNOperatorFactory - - { + public TopNOperatorFactory { for (ElementType e : elementTypes) { if (e == null) { throw new IllegalArgumentException("ElementType not known"); @@ -274,19 +310,20 @@ static int compareRows(Row r1, Row r2) { // the two rows are equal return 0; } + int length = Math.min(br1.length, br2.length); // one value is the prefix of the other if (mismatchedByteIndex == length) { // the value with the greater length is considered 
greater than the other if (length == br1.length) {// first row is less than the second row - return r2.orderByCompositeKeyAscending.get(length) ? 1 : -1; + return r2.bytesOrder.isByteOrderAscending(length) ? 1 : -1; } else {// second row is less than the first row - return r1.orderByCompositeKeyAscending.get(length) ? -1 : 1; + return r1.bytesOrder.isByteOrderAscending(length) ? -1 : 1; } } else { // compare the byte that mismatched accounting for that respective byte asc/desc ordering int c = Byte.compareUnsigned(br1.bytes[br1.offset + mismatchedByteIndex], br2.bytes[br2.offset + mismatchedByteIndex]); - return r1.orderByCompositeKeyAscending.get(mismatchedByteIndex) ? -c : c; + return r1.bytesOrder.isByteOrderAscending(mismatchedByteIndex) ? -c : c; } } @@ -312,10 +349,9 @@ public void addInput(Page page) { try { for (int i = 0; i < page.getPositionCount(); i++) { if (spare == null) { - spare = new Row(breaker); + spare = new Row(breaker, sortOrders); } else { spare.keys.clear(); - spare.orderByCompositeKeyAscending.clear(); spare.values.clear(); } rowFiller.row(i, spare); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index f43873b4fdfd9..be3e75fcce2a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -434,13 +434,14 @@ private TopNOperator.Row row( Page page, int position ) { + final var sortOrders = List.of(new TopNOperator.SortOrder(channel, asc, nullsFirst)); TopNOperator.RowFiller rf = new TopNOperator.RowFiller( IntStream.range(0, page.getBlockCount()).mapToObj(i -> elementType).toList(), IntStream.range(0, page.getBlockCount()).mapToObj(i -> encoder).toList(), - List.of(new TopNOperator.SortOrder(channel, 
asc, nullsFirst)), + sortOrders, page ); - TopNOperator.Row row = new TopNOperator.Row(nonBreakingBigArrays().breakerService().getBreaker("request")); + TopNOperator.Row row = new TopNOperator.Row(nonBreakingBigArrays().breakerService().getBreaker("request"), sortOrders); rf.row(position, row); return row; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java index 472b9e50767b1..9fb3a7644ca20 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java @@ -12,25 +12,27 @@ import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.test.ESTestCase; +import java.util.List; + import static org.hamcrest.Matchers.equalTo; public class TopNRowTests extends ESTestCase { private final CircuitBreaker breaker = new NoopCircuitBreaker(CircuitBreaker.REQUEST); public void testRamBytesUsedEmpty() { - TopNOperator.Row row = new TopNOperator.Row(breaker); + TopNOperator.Row row = new TopNOperator.Row(breaker, sortOrders()); assertThat(row.ramBytesUsed(), equalTo(expectedRamBytesUsed(row))); } public void testRamBytesUsedSmall() { - TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST)); + TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST), sortOrders()); row.keys.append(randomByte()); row.values.append(randomByte()); assertThat(row.ramBytesUsed(), equalTo(expectedRamBytesUsed(row))); } public void testRamBytesUsedBig() { - TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST)); + TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST), sortOrders()); for (int i = 0; i < 10000; i++) { 
row.keys.append(randomByte()); row.values.append(randomByte()); @@ -38,6 +40,13 @@ public void testRamBytesUsedBig() { assertThat(row.ramBytesUsed(), equalTo(expectedRamBytesUsed(row))); } + private static List sortOrders() { + return List.of( + new TopNOperator.SortOrder(randomNonNegativeInt(), randomBoolean(), randomBoolean()), + new TopNOperator.SortOrder(randomNonNegativeInt(), randomBoolean(), randomBoolean()) + ); + } + private long expectedRamBytesUsed(TopNOperator.Row row) { long expected = RamUsageTester.ramUsed(row); if (row.values.bytes().length == 0) { @@ -47,6 +56,8 @@ private long expectedRamBytesUsed(TopNOperator.Row row) { // The breaker is shared infrastructure so we don't count it but RamUsageTester does expected -= RamUsageTester.ramUsed(breaker); expected -= RamUsageTester.ramUsed("topn"); + // the sort orders are shared + expected -= RamUsageTester.ramUsed(sortOrders()); return expected; } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java index ffe122b8de222..31d0a7646e1b7 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java @@ -9,7 +9,6 @@ import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -47,7 +46,6 @@ * Tests that run ESQL queries that have, in the past, used so much memory they * crash Elasticsearch. 
*/ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102784") public class HeapAttackIT extends ESRestTestCase { /** * This used to fail, but we've since compacted top n so it actually succeeds now. From caec612feafca1450ae9e1800fbbb7298609986e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 1 Dec 2023 13:45:43 -0500 Subject: [PATCH 102/181] Make cosine similarity faster by storing magnitude and normalizing vectors (#99445) `cosine` is our default similarity and should provide a good experience on speed. `dot_product` is faster than `cosine` as it doesn't require calculating vector magnitudes in the similarity comparison loop. Instead, it can assume vectors have a length of `1` and use an optimized `dot_product` calculation. However, `cosine` as it exists today accepts vectors of any magnitude and cannot take advantage of this. This commit addresses this by: - Normalizing all vectors passed when indexing via `cosine` - Storing the calculated magnitude in an additional field (only if its `!= 1`). - Using the `dot_product` Lucene calculation - Normalizing query vectors when used against these new `cosine` fields - De-normalizing vectors when accessed via scripts - Allowing scripts to access these stored magnitudes. 
--- docs/changelog/99445.yaml | 5 + .../test/painless/140_dense_vector_basic.yml | 84 ++++++ .../test/mixed_cluster/30_vector_search.yml | 14 +- .../test/old_cluster/30_vector_search.yml | 28 +- .../upgraded_cluster/30_vector_search.yml | 29 +- .../search.vectors/40_knn_search_cosine.yml | 253 ++++++++++++++++++ .../elasticsearch/index/IndexVersions.java | 1 + .../DenormalizedCosineFloatVectorValues.java | 106 ++++++++ .../vectors/DenseVectorFieldMapper.java | 172 +++++++++--- .../mapper/vectors/VectorIndexFieldData.java | 17 +- .../script/field/vectors/KnnDenseVector.java | 12 +- .../vectors/KnnDenseVectorDocValuesField.java | 7 + ...ormalizedCosineFloatVectorValuesTests.java | 114 ++++++++ .../vectors/DenseVectorFieldMapperTests.java | 103 ++++++- .../KnnDenseVectorScriptDocValuesTests.java | 7 +- 15 files changed, 878 insertions(+), 74 deletions(-) create mode 100644 docs/changelog/99445.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java diff --git a/docs/changelog/99445.yaml b/docs/changelog/99445.yaml new file mode 100644 index 0000000000000..deea5fbf2423c --- /dev/null +++ b/docs/changelog/99445.yaml @@ -0,0 +1,5 @@ +pr: 99445 +summary: Make cosine similarity faster by storing magnitude and normalizing vectors +area: Vector Search +type: enhancement +issues: [] diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml index 291f014662995..a4245621f83e0 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml +++ 
b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml @@ -135,3 +135,87 @@ setup: - match: {hits.hits.2._id: "1"} - gte: {hits.hits.2._score: 0.78} - lte: {hits.hits.2._score: 0.791} + +--- +"L2 similarity with indexed cosine similarity vector": + - skip: + features: close_to + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "l2norm(params.query_vector, 'indexed_vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 301.36, error: 0.01}} + + - match: {hits.hits.1._id: "2"} + - close_to: {hits.hits.1._score: {value: 11.34, error: 0.01}} + + - match: {hits.hits.2._id: "3"} + - close_to: {hits.hits.2._score: {value: 0.01, error: 0.01}} +--- +"L1 similarity with indexed cosine similarity vector": + - skip: + features: close_to + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "l1norm(params.query_vector, 'indexed_vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 485.18, error: 0.01}} + + - match: {hits.hits.1._id: "2"} + - close_to: {hits.hits.1._score: {value: 12.30, error: 0.01}} + + - match: {hits.hits.2._id: "3"} + - close_to: {hits.hits.2._score: {value: 0.01, error: 0.01}} +--- +"Test vector magnitude equality": + - skip: + features: close_to + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "doc['vector'].magnitude" + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "1"} + - close_to: 
{hits.hits.0._score: {value: 429.6021, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 192.6447, error: 0.01}} + + - match: {hits.hits.2._id: "2"} + - close_to: {hits.hits.2._score: {value: 186.34454, error: 0.01}} diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml index 969c4428c7c6c..108f58b29bf27 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml @@ -1,5 +1,7 @@ --- "Search float indices created in old cluster": + - skip: + features: close_to - do: search: index: test-float-index @@ -15,11 +17,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -36,11 +38,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: index: test-float-index diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml 
b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml index b471fa56a47a5..4aca71fe48f4a 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml @@ -1,5 +1,7 @@ --- "Create indexed float vectors and search": + - skip: + features: close_to - do: indices.create: index: test-float-index @@ -56,11 +58,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -77,11 +79,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: @@ -114,6 +116,8 @@ --- "Create indexed byte vectors and search": + - skip: + features: close_to - do: indices.create: index: test-byte-index @@ -172,11 +176,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { 
value: 15.0, error: 0.00001 } } - do: search: @@ -193,11 +197,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml index 60304a0078acc..ee2c357594b94 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml @@ -1,5 +1,7 @@ --- "Search float indices created in old cluster": + - skip: + features: close_to - do: search: index: test-float-index @@ -15,11 +17,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -36,11 +38,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - 
close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: @@ -72,6 +74,8 @@ --- "Search byte indices created in old cluster": + - skip: + features: close_to - do: search: index: test-byte-index @@ -87,11 +91,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -108,11 +112,12 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } + - do: search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml new file mode 100644 index 0000000000000..8faad25f0037d --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml @@ -0,0 +1,253 @@ +setup: + - skip: + version: ' - 7.99.99' + reason: 'kNN search added in 8.0' + - do: + indices.create: + index: test + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + index: true + similarity: cosine + normalized_vector: + type: dense_vector + dims: 5 + index: true + similarity: cosine + end_normalized: + type: dense_vector + dims: 5 + index: true + similarity: cosine + first_normalized: + type: 
dense_vector + dims: 5 + index: true + similarity: cosine + middle_normalized: + type: dense_vector + dims: 5 + index: true + similarity: cosine + + + - do: + index: + index: test + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + middle_normalized: [230.0, 300.33, -34.8988, 15.555, -200.0] + normalized_vector: [0.5353791, 0.6990887, -0.08123516, 0.03620792, -0.46554706] + end_normalized: [230.0, 300.33, -34.8988, 15.555, -200.0] + first_normalized: [0.5353791, 0.6990887, -0.08123516, 0.03620792, -0.46554706] + + - do: + index: + index: test + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + first_normalized: [-0.5, 100.0, -13, 14.8, -156.0] + normalized_vector: [-0.0026832016, 0.53664035, -0.06976324, 0.07942277, -0.8371589] + middle_normalized: [-0.0026832016, 0.53664035, -0.06976324, 0.07942277, -0.8371589] + end_normalized: [-0.0026832016, 0.53664035, -0.06976324, 0.07942277, -0.8371589] + + - do: + index: + index: test + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + first_normalized: [0.5, 111.3, -13.0, 14.8, -156.0] + middle_normalized: [0.5, 111.3, -13.0, 14.8, -156.0] + normalized_vector: [0.0025954517, 0.5777475, -0.06748174, 0.076825365, -0.8097809] + end_normalized: [0.0025954517, 0.5777475, -0.06748174, 0.076825365, -0.8097809] + + - do: + indices.refresh: {} + +--- +"kNN search only regular query": + - skip: + version: ' - 8.3.99' + reason: 'kNN added to search endpoint in 8.4' + features: close_to + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: 
normalized_vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: first_normalized + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: middle_normalized + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: end_normalized + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + +# With a normalized query vector, all should be the same + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: 
normalized_vector + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: first_normalized + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: middle_normalized + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: end_normalized + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index b6bebcf6abb12..75ee272e7effe 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -90,6 +90,7 @@ private static IndexVersion def(int id, Version luceneVersion) { 
public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); public static final IndexVersion ES_VERSION_8_12 = def(8_500_004, Version.LUCENE_9_8_0); + public static final IndexVersion NORMALIZED_VECTOR_COSINE = def(8_500_005, Version.LUCENE_9_8_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java new file mode 100644 index 0000000000000..1bffbb4fd6c3d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper.vectors; + +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.NumericDocValues; + +import java.io.IOException; + +/** + * Provides the denormalized vectors. Float vectors stored with cosine similarity are normalized by default. So when reading the value + * for scripts, we to denormalize them. 
+ */ +public class DenormalizedCosineFloatVectorValues extends FloatVectorValues { + + private final FloatVectorValues in; + private final NumericDocValues magnitudeIn; + private final float[] vector; + private float magnitude = 1f; + private boolean hasMagnitude; + private int docId = -1; + + public DenormalizedCosineFloatVectorValues(FloatVectorValues in, NumericDocValues magnitudeIn) { + this.in = in; + this.magnitudeIn = magnitudeIn; + this.vector = new float[in.dimension()]; + } + + @Override + public int dimension() { + return in.dimension(); + } + + @Override + public int size() { + return in.size(); + } + + @Override + public float[] vectorValue() throws IOException { + // Lazy load vectors as we may iterate but not actually require the vector + return vectorValue(in.docID()); + } + + @Override + public int docID() { + return in.docID(); + } + + @Override + public int nextDoc() throws IOException { + return in.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } + + public float magnitude() { + return magnitude; + } + + private float[] vectorValue(int docId) throws IOException { + if (docId != this.docId) { + this.docId = docId; + hasMagnitude = decodedMagnitude(docId); + // We should only copy and transform if we have a stored a non-unit length magnitude + if (hasMagnitude) { + System.arraycopy(in.vectorValue(), 0, vector, 0, dimension()); + for (int i = 0; i < vector.length; i++) { + vector[i] *= magnitude; + } + return vector; + } else { + return in.vectorValue(); + } + } else { + return hasMagnitude ? 
vector : in.vectorValue(); + } + } + + private boolean decodedMagnitude(int docId) throws IOException { + if (magnitudeIn == null) { + return false; + } + int currentDoc = magnitudeIn.docID(); + if (docId == currentDoc) { + return true; + } else { + if (magnitudeIn.advanceExact(docId)) { + magnitude = Float.intBitsToFloat((int) magnitudeIn.longValue()); + return true; + } else { + magnitude = 1f; + return false; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 6aaea1dd32285..423f5d81ebbd3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -15,12 +15,15 @@ import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.FloatDocValuesField; import org.apache.lucene.document.KnnByteVectorField; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorEncoding; @@ -63,6 +66,7 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.time.ZoneId; +import java.util.Arrays; import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -76,9 +80,16 @@ * A {@link FieldMapper} for indexing a dense vector of floats. 
*/ public class DenseVectorFieldMapper extends FieldMapper { + public static final String COSINE_MAGNITUDE_FIELD_SUFFIX = "._magnitude"; + private static final float EPS = 1e-4f; + + static boolean isNotUnitVector(float magnitude) { + return Math.abs(magnitude - 1.0f) > EPS; + } public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; + public static final IndexVersion NORMALIZE_COSINE = IndexVersions.NORMALIZED_VECTOR_COSINE; public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersions.V_8_9_0; public static final String CONTENT_TYPE = "dense_vector"; @@ -242,7 +253,8 @@ IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldTyp denseVectorFieldType.indexVersionCreated, this, denseVectorFieldType.dims, - denseVectorFieldType.indexed + denseVectorFieldType.indexed, + r -> r ); } @@ -311,7 +323,7 @@ void checkVectorMagnitude( } @Override - public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { + public void parseKnnVectorAndIndex(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { int index = 0; byte[] vector = new byte[fieldMapper.fieldType().dims]; float squaredMagnitude = 0; @@ -356,7 +368,12 @@ public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMappe } fieldMapper.checkDimensionMatches(index, context); checkVectorMagnitude(fieldMapper.fieldType().similarity, errorByteElementsAppender(vector), squaredMagnitude); - return createKnnVectorField(fieldMapper.fieldType().name(), vector, fieldMapper.fieldType().similarity.function); + Field field = createKnnVectorField( + fieldMapper.fieldType().name(), + vector, + fieldMapper.fieldType().similarity.vectorSimilarityFunction(fieldMapper.indexCreatedVersion, this) + ); + 
context.doc().addWithKey(fieldMapper.fieldType().name(), field); } @Override @@ -438,7 +455,32 @@ IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldTyp denseVectorFieldType.indexVersionCreated, this, denseVectorFieldType.dims, - denseVectorFieldType.indexed + denseVectorFieldType.indexed, + denseVectorFieldType.indexVersionCreated.onOrAfter(NORMALIZE_COSINE) + && denseVectorFieldType.indexed + && denseVectorFieldType.similarity.equals(VectorSimilarity.COSINE) ? r -> new FilterLeafReader(r) { + @Override + public CacheHelper getCoreCacheHelper() { + return r.getCoreCacheHelper(); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return r.getReaderCacheHelper(); + } + + @Override + public FloatVectorValues getFloatVectorValues(String fieldName) throws IOException { + FloatVectorValues values = in.getFloatVectorValues(fieldName); + if (values == null) { + return null; + } + return new DenormalizedCosineFloatVectorValues( + values, + in.getNumericDocValues(fieldName + COSINE_MAGNITUDE_FIELD_SUFFIX) + ); + } + } : r -> r ); } @@ -464,7 +506,7 @@ void checkVectorMagnitude( throw new IllegalArgumentException(appender.apply(errorBuilder).toString()); } - if (similarity == VectorSimilarity.DOT_PRODUCT && Math.abs(squaredMagnitude - 1.0f) > 1e-4f) { + if (similarity == VectorSimilarity.DOT_PRODUCT && isNotUnitVector(squaredMagnitude)) { errorBuilder = new StringBuilder( "The [" + VectorSimilarity.DOT_PRODUCT + "] similarity can only be used with unit-length vectors." 
); @@ -480,7 +522,7 @@ void checkVectorMagnitude( } @Override - public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { + public void parseKnnVectorAndIndex(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { int index = 0; float[] vector = new float[fieldMapper.fieldType().dims]; float squaredMagnitude = 0; @@ -495,7 +537,23 @@ public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMappe fieldMapper.checkDimensionMatches(index, context); checkVectorBounds(vector); checkVectorMagnitude(fieldMapper.fieldType().similarity, errorFloatElementsAppender(vector), squaredMagnitude); - return createKnnVectorField(fieldMapper.fieldType().name(), vector, fieldMapper.fieldType().similarity.function); + if (fieldMapper.indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) + && fieldMapper.fieldType().similarity.equals(VectorSimilarity.COSINE) + && isNotUnitVector(squaredMagnitude)) { + float length = (float) Math.sqrt(squaredMagnitude); + for (int i = 0; i < vector.length; i++) { + vector[i] /= length; + } + final String fieldName = fieldMapper.fieldType().name() + COSINE_MAGNITUDE_FIELD_SUFFIX; + Field magnitudeField = new FloatDocValuesField(fieldName, length); + context.doc().addWithKey(fieldName, magnitudeField); + } + Field field = createKnnVectorField( + fieldMapper.fieldType().name(), + vector, + fieldMapper.fieldType().similarity.vectorSimilarityFunction(fieldMapper.indexCreatedVersion, this) + ); + context.doc().addWithKey(fieldMapper.fieldType().name(), field); } @Override @@ -542,7 +600,7 @@ ByteBuffer createByteBuffer(IndexVersion indexVersion, int numBytes) { abstract IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldType, FieldDataContext fieldDataContext); - abstract Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException; + abstract void 
parseKnnVectorAndIndex(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException; abstract double parseKnnVectorToByteBuffer(DocumentParserContext context, DenseVectorFieldMapper fieldMapper, ByteBuffer byteBuffer) throws IOException; @@ -646,23 +704,35 @@ static Function errorByteElementsAppender(byte[] v ); enum VectorSimilarity { - L2_NORM(VectorSimilarityFunction.EUCLIDEAN) { + L2_NORM { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { case BYTE, FLOAT -> 1f / (1f + similarity * similarity); }; } + + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return VectorSimilarityFunction.EUCLIDEAN; + } }, - COSINE(VectorSimilarityFunction.COSINE) { + COSINE { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { case BYTE, FLOAT -> (1 + similarity) / 2f; }; } + + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return indexVersion.onOrAfter(NORMALIZE_COSINE) && ElementType.FLOAT.equals(elementType) + ? 
VectorSimilarityFunction.DOT_PRODUCT + : VectorSimilarityFunction.COSINE; + } }, - DOT_PRODUCT(VectorSimilarityFunction.DOT_PRODUCT) { + DOT_PRODUCT { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { @@ -670,21 +740,25 @@ float score(float similarity, ElementType elementType, int dim) { case FLOAT -> (1 + similarity) / 2f; }; } + + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return VectorSimilarityFunction.DOT_PRODUCT; + } }, - MAX_INNER_PRODUCT(VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT) { + MAX_INNER_PRODUCT { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { case BYTE, FLOAT -> similarity < 0 ? 1 / (1 + -1 * similarity) : similarity + 1; }; } - }; - - public final VectorSimilarityFunction function; - VectorSimilarity(VectorSimilarityFunction function) { - this.function = function; - } + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT; + } + }; @Override public final String toString() { @@ -692,6 +766,8 @@ public final String toString() { } abstract float score(float similarity, ElementType elementType, int dim); + + public abstract VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType); } private abstract static class IndexOptions implements ToXContent { @@ -892,11 +968,19 @@ public Query createKnnQuery( } elementType.checkVectorBounds(queryVector); - if (similarity == VectorSimilarity.DOT_PRODUCT - || similarity == VectorSimilarity.COSINE - || similarity == VectorSimilarity.MAX_INNER_PRODUCT) { + if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); 
elementType.checkVectorMagnitude(similarity, ElementType.errorFloatElementsAppender(queryVector), squaredMagnitude); + if (similarity == VectorSimilarity.COSINE + && ElementType.FLOAT.equals(elementType) + && indexVersionCreated.onOrAfter(NORMALIZE_COSINE) + && isNotUnitVector(squaredMagnitude)) { + float length = (float) Math.sqrt(squaredMagnitude); + queryVector = Arrays.copyOf(queryVector, queryVector.length); + for (int i = 0; i < queryVector.length; i++) { + queryVector[i] /= length; + } + } } Query knnQuery = switch (elementType) { case BYTE -> { @@ -983,19 +1067,22 @@ public void parse(DocumentParserContext context) throws IOException { context.addDynamicMapper(name(), update); return; } - Field field = fieldType().indexed ? parseKnnVector(context) : parseBinaryDocValuesVector(context); - context.doc().addWithKey(fieldType().name(), field); + if (fieldType().indexed) { + parseKnnVectorAndIndex(context); + } else { + parseBinaryDocValuesVectorAndIndex(context); + } } - private Field parseKnnVector(DocumentParserContext context) throws IOException { - return fieldType().elementType.parseKnnVector(context, this); + private void parseKnnVectorAndIndex(DocumentParserContext context) throws IOException { + fieldType().elementType.parseKnnVectorAndIndex(context, this); } - private Field parseBinaryDocValuesVector(DocumentParserContext context) throws IOException { - int dims = fieldType().dims; - ElementType elementType = fieldType().elementType; + private void parseBinaryDocValuesVectorAndIndex(DocumentParserContext context) throws IOException { // encode array of floats as array of integers and store into buf // this code is here and not in the VectorEncoderDecoder so not to create extra arrays + int dims = fieldType().dims; + ElementType elementType = fieldType().elementType; int numBytes = indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION) ? 
dims * elementType.elementBytes + MAGNITUDE_BYTES : dims * elementType.elementBytes; @@ -1007,7 +1094,8 @@ private Field parseBinaryDocValuesVector(DocumentParserContext context) throws I float vectorMagnitude = (float) Math.sqrt(dotProduct); byteBuffer.putFloat(vectorMagnitude); } - return new BinaryDocValuesField(fieldType().name(), new BytesRef(byteBuffer.array())); + Field field = new BinaryDocValuesField(fieldType().name(), new BytesRef(byteBuffer.array())); + context.doc().addWithKey(fieldType().name(), field); } private void checkDimensionExceeded(int index, DocumentParserContext context) { @@ -1120,7 +1208,7 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { ); } if (fieldType().indexed) { - return new IndexedSyntheticFieldLoader(); + return new IndexedSyntheticFieldLoader(indexCreatedVersion, fieldType().similarity); } return new DocValuesSyntheticFieldLoader(indexCreatedVersion); } @@ -1129,6 +1217,16 @@ private class IndexedSyntheticFieldLoader implements SourceLoader.SyntheticField private FloatVectorValues values; private ByteVectorValues byteVectorValues; private boolean hasValue; + private boolean hasMagnitude; + + private final IndexVersion indexCreatedVersion; + private final VectorSimilarity vectorSimilarity; + private NumericDocValues magnitudeReader; + + private IndexedSyntheticFieldLoader(IndexVersion indexCreatedVersion, VectorSimilarity vectorSimilarity) { + this.indexCreatedVersion = indexCreatedVersion; + this.vectorSimilarity = vectorSimilarity; + } @Override public Stream> storedFieldLoaders() { @@ -1139,8 +1237,12 @@ public Stream> storedFieldLoaders() { public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException { values = leafReader.getFloatVectorValues(name()); if (values != null) { + if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) { + magnitudeReader = leafReader.getNumericDocValues(name() + 
COSINE_MAGNITUDE_FIELD_SUFFIX); + } return docId -> { hasValue = docId == values.advance(docId); + hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); return hasValue; }; } @@ -1164,10 +1266,18 @@ public void write(XContentBuilder b) throws IOException { if (false == hasValue) { return; } + float magnitude = Float.NaN; + if (hasMagnitude) { + magnitude = Float.intBitsToFloat((int) magnitudeReader.longValue()); + } b.startArray(simpleName()); if (values != null) { for (float v : values.vectorValue()) { - b.value(v); + if (hasMagnitude) { + b.value(v * magnitude); + } else { + b.value(v); + } } } else if (byteVectorValues != null) { byte[] vectorValue = byteVectorValues.vectorValue(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java index 6ebfd1f57088b..3be341c54c7da 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.SortField; import org.elasticsearch.common.util.BigArrays; @@ -24,6 +25,8 @@ import org.elasticsearch.search.sort.BucketedSort; import org.elasticsearch.search.sort.SortOrder; +import java.util.function.Function; + public class VectorIndexFieldData implements IndexFieldData { protected final String fieldName; @@ -32,6 +35,7 @@ public class VectorIndexFieldData implements IndexFieldData readerWrapper; public VectorIndexFieldData( String fieldName, @@ -39,7 +43,8 @@ public VectorIndexFieldData( IndexVersion indexVersion, ElementType elementType, int dims, - boolean indexed + boolean indexed, + Function readerWrapper ) { this.fieldName = fieldName; 
this.valuesSourceType = valuesSourceType; @@ -47,6 +52,7 @@ public VectorIndexFieldData( this.elementType = elementType; this.dims = dims; this.indexed = indexed; + this.readerWrapper = readerWrapper; } @Override @@ -82,7 +88,7 @@ public BucketedSort newBucketedSort( @Override public VectorDVLeafFieldData load(LeafReaderContext context) { - return new VectorDVLeafFieldData(context.reader(), fieldName, indexVersion, elementType, dims, indexed); + return new VectorDVLeafFieldData(readerWrapper.apply(context.reader()), fieldName, indexVersion, elementType, dims, indexed); } @Override @@ -97,6 +103,7 @@ public static class Builder implements IndexFieldData.Builder { private final ElementType elementType; private final int dims; private final boolean indexed; + private final Function readerWrapper; public Builder( String name, @@ -104,7 +111,8 @@ public Builder( IndexVersion indexVersion, ElementType elementType, int dims, - boolean indexed + boolean indexed, + Function readerWrapper ) { this.name = name; this.valuesSourceType = valuesSourceType; @@ -112,11 +120,12 @@ public Builder( this.elementType = elementType; this.dims = dims; this.indexed = indexed; + this.readerWrapper = readerWrapper; } @Override public IndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new VectorIndexFieldData(name, valuesSourceType, indexVersion, elementType, dims, indexed); + return new VectorIndexFieldData(name, valuesSourceType, indexVersion, elementType, dims, indexed, readerWrapper); } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java index 9edcd97df4747..1605f179e36aa 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java @@ -16,9 +16,16 @@ public class KnnDenseVector implements DenseVector { 
protected final float[] docVector; + private float magnitude; public KnnDenseVector(float[] docVector) { this.docVector = docVector; + this.magnitude = Float.NaN; + } + + public KnnDenseVector(float[] docVector, float magnitude) { + this.docVector = docVector; + this.magnitude = magnitude; } @Override @@ -30,7 +37,10 @@ public float[] getVector() { @Override public float getMagnitude() { - return DenseVector.getMagnitude(docVector); + if (Float.isNaN(magnitude)) { + magnitude = DenseVector.getMagnitude(docVector); + } + return magnitude; } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java index b10d83cbdb52b..f5d637dc063b4 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenormalizedCosineFloatVectorValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues; @@ -63,6 +64,9 @@ public DenseVector get() { return DenseVector.EMPTY; } + if (input instanceof DenormalizedCosineFloatVectorValues normalized) { + return new KnnDenseVector(vector, normalized.magnitude()); + } return new KnnDenseVector(vector); } @@ -72,6 +76,9 @@ public DenseVector get(DenseVector defaultValue) { return defaultValue; } + if (input instanceof DenormalizedCosineFloatVectorValues normalized) { + return new KnnDenseVector(vector, normalized.magnitude()); + } return new KnnDenseVector(vector); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java new file mode 100644 index 0000000000000..c158dcccd41d0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper.vectors; + +import org.apache.lucene.index.NumericDocValues; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.elasticsearch.index.mapper.vectors.KnnDenseVectorScriptDocValuesTests.wrap; + +public class DenormalizedCosineFloatVectorValuesTests extends ESTestCase { + + public void testEmptyVectors() throws IOException { + DenormalizedCosineFloatVectorValues normalizedCosineFloatVectorValues = new DenormalizedCosineFloatVectorValues( + wrap(new float[0][0]), + wrapMagnitudes(new float[0]) + ); + assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.nextDoc()); + } + + public void testRandomVectors() throws IOException { + int dims = randomIntBetween(64, 2048); + int numVectors = randomIntBetween(1, 24); + float[][] vectors = new float[numVectors][]; + float[][] normalizedVectors = new float[numVectors][]; + float[] magnitudes = new float[numVectors]; + for (int i = 0; i < numVectors; i++) { + float[] vector = new float[dims]; + float mag = randomVector(vector); + magnitudes[i] = mag; + vectors[i] = vector; + normalizedVectors[i] = copyAndNormalize(vector, mag); + } + + DenormalizedCosineFloatVectorValues 
normalizedCosineFloatVectorValues = new DenormalizedCosineFloatVectorValues( + wrap(normalizedVectors), + wrapMagnitudes(magnitudes) + ); + + for (int i = 0; i < numVectors; i++) { + assertEquals(i, normalizedCosineFloatVectorValues.advance(i)); + assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(), (float) 1e-6); + assertEquals(magnitudes[i], normalizedCosineFloatVectorValues.magnitude(), (float) 1e-6); + } + + } + + public static float[] copyAndNormalize(float[] in, float mag) { + float[] copy = Arrays.copyOf(in, in.length); + for (int i = 0; i < copy.length; i++) { + copy[i] = copy[i] / mag; + } + return copy; + } + + private static float randomVector(float[] in) { + float magnitude = 0f; + for (int i = 0; i < in.length; i++) { + float v = randomFloat() * randomIntBetween(1, 5); + in[i] = v; + magnitude += v * v; + } + return (float) Math.sqrt(magnitude); + } + + public static NumericDocValues wrapMagnitudes(float[] magnitudes) { + return new NumericDocValues() { + int index = -1; + + @Override + public long longValue() throws IOException { + return Float.floatToRawIntBits(magnitudes[index]); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return advance(target) != NO_MORE_DOCS; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + return advance(index + 1); + } + + @Override + public int advance(int target) { + if (target >= magnitudes.length) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return magnitudes.length; + } + }; + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index b10d756a6e458..0b3b4fae82324 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -17,9 +17,11 @@ import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.VectorEncoding; +import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentHelper; @@ -44,6 +46,7 @@ import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.lookup.SourceProvider; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AssumptionViolatedException; @@ -280,7 +283,10 @@ private void testIndexedVector(VectorSimilarity similarity, DocumentMapper mappe KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue(), 0.001f); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals( + similarity.vectorSimilarityFunction(IndexVersion.current(), ElementType.FLOAT), + vectorField.fieldType().vectorSimilarityFunction() + ); } public void testNonIndexedVector() throws Exception { @@ -333,7 +339,10 @@ public void testIndexedByteVector() throws Exception { new byte[] { (byte) -1, (byte) 1, (byte) 127 }, vectorField.vectorValue() ); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals( + similarity.vectorSimilarityFunction(IndexVersion.current(), ElementType.BYTE), + vectorField.fieldType().vectorSimilarityFunction() + ); } public void testDotProductWithInvalidNorm() throws 
Exception { @@ -571,7 +580,7 @@ public void testDefaultParamsBeforeIndexByDefault() throws Exception { assertNull(denseVectorFieldType.getSimilarity()); } - public void testtParamsBeforeIndexByDefault() throws Exception { + public void testParamsBeforeIndexByDefault() throws Exception { DocumentMapper documentMapper = createDocumentMapper(INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION, fieldMapping(b -> { b.field("type", "dense_vector").field("dims", 3).field("index", true).field("similarity", "dot_product"); })); @@ -651,6 +660,48 @@ public void testDocumentsWithIncorrectDims() throws Exception { } } + public void testCosineDenseVectorValues() throws IOException { + final int dims = randomIntBetween(64, 2048); + VectorSimilarity similarity = VectorSimilarity.COSINE; + DocumentMapper mapper = createDocumentMapper( + fieldMapping(b -> b.field("type", "dense_vector").field("dims", dims).field("index", true).field("similarity", similarity)) + ); + float[] vector = new float[dims]; + for (int i = 0; i < dims; i++) { + vector[i] = randomFloat() * randomIntBetween(1, 10); + } + ParsedDocument doc1 = mapper.parse(source(b -> b.array("field", vector))); + List fields = doc1.rootDoc().getFields("field"); + + assertEquals(1, fields.size()); + assertThat(fields.get(0), instanceOf(KnnFloatVectorField.class)); + KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); + // Cosine vectors are now normalized + VectorUtil.l2normalize(vector); + assertArrayEquals("Parsed vector is not equal to normalized original.", vector, vectorField.vectorValue(), 0.001f); + } + + public void testCosineDenseVectorValuesOlderIndexVersions() throws IOException { + final int dims = randomIntBetween(64, 2048); + VectorSimilarity similarity = VectorSimilarity.COSINE; + DocumentMapper mapper = createDocumentMapper( + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.NEW_SPARSE_VECTOR), + fieldMapping(b -> b.field("type", "dense_vector").field("dims", 
dims).field("index", true).field("similarity", similarity)) + ); + float[] vector = new float[dims]; + for (int i = 0; i < dims; i++) { + vector[i] = randomFloat() * randomIntBetween(1, 10); + } + ParsedDocument doc1 = mapper.parse(source(b -> b.array("field", vector))); + List fields = doc1.rootDoc().getFields("field"); + + assertEquals(1, fields.size()); + assertThat(fields.get(0), instanceOf(KnnFloatVectorField.class)); + KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); + // Cosine vectors are now normalized + assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue(), 0.001f); + } + /** * Test that max dimensions limit for float dense_vector field * is 4096 as defined by {@link DenseVectorFieldMapper#MAX_DIMS_COUNT} @@ -674,7 +725,9 @@ public void testMaxDimsFloatVector() throws IOException { KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); assertEquals(dims, vectorField.fieldType().vectorDimension()); assertEquals(VectorEncoding.FLOAT32, vectorField.fieldType().vectorEncoding()); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals(VectorSimilarityFunction.DOT_PRODUCT, vectorField.fieldType().vectorSimilarityFunction()); + // Cosine vectors are now normalized + VectorUtil.l2normalize(vector); assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue(), 0.001f); } @@ -708,10 +761,50 @@ public void testMaxDimsByteVector() throws IOException { KnnByteVectorField vectorField = (KnnByteVectorField) fields.get(0); assertEquals(dims, vectorField.fieldType().vectorDimension()); assertEquals(VectorEncoding.BYTE, vectorField.fieldType().vectorEncoding()); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals( + similarity.vectorSimilarityFunction(IndexVersion.current(), ElementType.BYTE), + vectorField.fieldType().vectorSimilarityFunction() + ); 
assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue()); } + public void testVectorSimilarity() { + assertEquals( + VectorSimilarityFunction.COSINE, + VectorSimilarity.COSINE.vectorSimilarityFunction(IndexVersion.current(), ElementType.BYTE) + ); + assertEquals( + VectorSimilarityFunction.COSINE, + VectorSimilarity.COSINE.vectorSimilarityFunction( + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, DenseVectorFieldMapper.NORMALIZE_COSINE), + ElementType.FLOAT + ) + ); + assertEquals( + VectorSimilarityFunction.DOT_PRODUCT, + VectorSimilarity.COSINE.vectorSimilarityFunction( + IndexVersionUtils.randomVersionBetween(random(), DenseVectorFieldMapper.NORMALIZE_COSINE, IndexVersion.current()), + ElementType.FLOAT + ) + ); + assertEquals( + VectorSimilarityFunction.EUCLIDEAN, + VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.BYTE) + ); + assertEquals( + VectorSimilarityFunction.EUCLIDEAN, + VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.FLOAT) + ); + assertEquals( + VectorSimilarityFunction.DOT_PRODUCT, + VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.BYTE) + ); + assertEquals( + VectorSimilarityFunction.DOT_PRODUCT, + VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.FLOAT) + ); + } + @Override protected void assertFetchMany(MapperService mapperService, String field, Object value, String format, int count) throws IOException { assumeFalse("Dense vectors currently don't support multiple values in the same field", false); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java index ce71236b3524f..81fdf7d7bec24 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java @@ -234,12 +234,13 @@ public int advance(int target) { } public static FloatVectorValues wrap(float[][] vectors) { + int dim = vectors.length > 0 ? vectors[0].length : 0; return new FloatVectorValues() { - int index = 0; + int index = -1; @Override public int dimension() { - return 0; + return dim; } @Override @@ -259,7 +260,7 @@ public int docID() { @Override public int nextDoc() { - throw new UnsupportedOperationException(); + return advance(index + 1); } @Override From c4e369dafe96c1cab5122a466005b3f413edd9b2 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Fri, 1 Dec 2023 14:35:49 -0500 Subject: [PATCH 103/181] [ML] Implementing Hugging Face text embedding service (#102730) * Pulling in main * Refactoring and adding hugging face text embedding * Adding a few missing items and reminders * Adding error handling for hugging face * Fixing some issues with hugging face * Fixing tests * Fixing spotless * Adding support for array of arrays response format * More comments * Addressing feedback * Adding more tests and fixing errors * Adding null check and tests --------- Co-authored-by: Elastic Machine --- .../org/elasticsearch/TransportVersions.java | 1 + .../inference/src/main/java/module-info.java | 2 + .../InferenceNamedWriteablesProvider.java | 7 +- .../xpack/inference/InferencePlugin.java | 2 + .../xpack/inference/UnparsedModel.java | 35 -- .../action/huggingface/HuggingFaceAction.java | 78 +++ .../huggingface/HuggingFaceActionCreator.java | 49 ++ .../huggingface/HuggingFaceActionVisitor.java | 18 + .../huggingface/HuggingFaceElserAction.java | 53 -- .../http/retry/BaseResponseHandler.java | 90 +++ .../external/http/retry/ErrorMessage.java | 12 + .../external/http/retry/RetryException.java | 6 +- 
.../http/retry/RetryingHttpSender.java | 16 +- .../huggingface/HuggingFaceClient.java | 48 -- .../HuggingFaceResponseHandler.java | 63 ++ .../external/openai/OpenAiClient.java | 3 +- .../openai/OpenAiResponseHandler.java | 64 ++ ....java => HuggingFaceInferenceRequest.java} | 6 +- ...=> HuggingFaceInferenceRequestEntity.java} | 4 +- .../external/response/XContentUtils.java | 46 ++ .../HuggingFaceElserResponseEntity.java | 6 +- .../HuggingFaceEmbeddingsResponseEntity.java | 161 +++++ .../HuggingFaceErrorResponseEntity.java | 52 ++ .../OpenAiEmbeddingsResponseEntity.java | 30 +- .../openai/OpenAiErrorResponseEntity.java | 6 +- .../inference/registry/ModelRegistry.java | 8 +- .../inference/services/SenderService.java | 77 +++ ...MapParsingUtils.java => ServiceUtils.java} | 32 +- .../services/elser/ElserMlNodeService.java | 4 +- .../elser/ElserMlNodeServiceSettings.java | 14 +- .../huggingface/HuggingFaceBaseService.java | 106 ++++ .../huggingface/HuggingFaceModel.java | 29 + .../huggingface/HuggingFaceService.java | 55 ++ .../HuggingFaceServiceSettings.java | 89 +++ .../elser/HuggingFaceElserModel.java | 44 +- .../elser/HuggingFaceElserSecretSettings.java | 9 +- .../elser/HuggingFaceElserService.java | 115 +--- .../HuggingFaceElserServiceSettings.java | 22 +- .../HuggingFaceEmbeddingsModel.java | 70 +++ .../openai/OpenAiResponseHandler.java | 127 ---- .../services/openai/OpenAiService.java | 93 +-- .../openai/OpenAiServiceSettings.java | 6 +- .../embeddings/OpenAiEmbeddingsModel.java | 5 +- .../OpenAiEmbeddingsRequestTaskSettings.java | 2 +- .../OpenAiEmbeddingsTaskSettings.java | 4 +- .../settings/DefaultSecretSettings.java | 9 +- .../HuggingFaceActionCreatorTests.java} | 151 ++++- .../huggingface/HuggingFaceActionTests.java | 114 ++++ .../HuggingFaceElserActionTests.java | 189 ------ .../http/retry/BaseResponseHandlerTests.java | 32 + .../HuggingFaceResponseHandlerTests.java | 93 +++ .../openai/OpenAiResponseHandlerTests.java | 84 +++ 
.../HuggingFaceElserRequestEntityTests.java | 2 +- .../HuggingFaceElserRequestTests.java | 6 +- ...gingFaceEmbeddingsResponseEntityTests.java | 339 ++++++++++ .../HuggingFaceErrorResponseEntityTests.java | 62 ++ .../OpenAiEmbeddingsResponseEntityTests.java | 33 - .../OpenAiErrorResponseEntityTests.java | 2 +- .../xpack/inference/model/TestModel.java | 14 +- .../services/SenderServiceTests.java | 143 +++++ ...UtilsTests.java => ServiceUtilsTests.java} | 32 +- .../xpack/inference/services/Utils.java | 27 + .../HuggingFaceBaseServiceTests.java | 111 ++++ .../HuggingFaceServiceSettingsTests.java | 107 ++++ .../huggingface/HuggingFaceServiceTests.java | 589 ++++++++++++++++++ .../elser/HuggingFaceElserModelTests.java | 32 + .../HuggingFaceElserSecretSettingsTests.java | 4 + .../HuggingFaceEmbeddingsModelTests.java | 34 + .../openai/OpenAiResponseHandlerTests.java | 63 -- .../openai/OpenAiServiceSettingsTests.java | 1 - .../services/openai/OpenAiServiceTests.java | 305 +++++---- .../settings/DefaultSecretSettingsTests.java | 4 + 72 files changed, 3369 insertions(+), 982 deletions(-) delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/{HuggingFaceElserRequest.java => HuggingFaceInferenceRequest.java} (86%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/{HuggingFaceElserRequestEntity.java => HuggingFaceInferenceRequestEntity.java} (85%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/{MapParsingUtils.java => ServiceUtils.java} (79%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/{huggingface/HuggingFaceClientTests.java => action/huggingface/HuggingFaceActionCreatorTests.java} (50%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java rename 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/{MapParsingUtilsTests.java => ServiceUtilsTests.java} (88%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index b6e204f3839f7..c392d3b6b4e29 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -183,6 +183,7 @@ static TransportVersion def(int id) { public static final TransportVersion INFERENCE_SERVICE_RESULTS_ADDED = def(8_550_00_0); public static final TransportVersion ESQL_PROFILE = def(8_551_00_0); public static final TransportVersion CLUSTER_STATS_RESCORER_USAGE_ADDED = def(8_552_00_0); + public static final TransportVersion ML_INFERENCE_HF_SERVICE_ADDED = def(8_553_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 87f623bdfe5cc..3879a0a344e06 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -22,6 +22,8 @@ exports org.elasticsearch.xpack.inference.registry; exports org.elasticsearch.xpack.inference.rest; exports org.elasticsearch.xpack.inference.services; + exports org.elasticsearch.xpack.inference.external.http.sender; + exports org.elasticsearch.xpack.inference.external.http; exports org.elasticsearch.xpack.inference.services.elser; exports org.elasticsearch.xpack.inference.services.huggingface.elser; exports org.elasticsearch.xpack.inference.services.openai; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 092b1200fb80a..c632c568fea16 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import 
org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettings; @@ -32,6 +33,7 @@ public class InferenceNamedWriteablesProvider { private InferenceNamedWriteablesProvider() {} + @SuppressWarnings("deprecation") public static List getNamedWriteables() { List namedWriteables = new ArrayList<>(); @@ -62,7 +64,7 @@ public static List getNamedWriteables() { new NamedWriteableRegistry.Entry(TaskSettings.class, ElserMlNodeTaskSettings.NAME, ElserMlNodeTaskSettings::new) ); - // Hugging Face ELSER config + // Hugging Face config namedWriteables.add( new NamedWriteableRegistry.Entry( ServiceSettings.class, @@ -70,6 +72,9 @@ public static List getNamedWriteables() { HuggingFaceElserServiceSettings::new ) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(ServiceSettings.class, HuggingFaceServiceSettings.NAME, HuggingFaceServiceSettings::new) + ); namedWriteables.add( new NamedWriteableRegistry.Entry(SecretSettings.class, HuggingFaceElserSecretSettings.NAME, HuggingFaceElserSecretSettings::new) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 7e7f2c9e05680..3adc63c9863cb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceService; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; import org.elasticsearch.xpack.inference.services.openai.OpenAiService; @@ -147,6 +148,7 @@ public List 
getInferenceServiceFactories() { return List.of( ElserMlNodeService::new, context -> new HuggingFaceElserService(httpFactory, serviceComponents), + context -> new HuggingFaceService(httpFactory, serviceComponents), context -> new OpenAiService(httpFactory, serviceComponents) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java deleted file mode 100644 index 03e0f4d8a4543..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.rest.RestStatus; - -import java.util.Map; - -public record UnparsedModel(String modelId, TaskType taskType, String service, Map settings, Map secrets) { - - public static UnparsedModel unparsedModelFromMap(Map configMap, Map secretsMap) { - String modelId = removeStringOrThrowIfNull(configMap, ModelConfigurations.MODEL_ID); - String service = removeStringOrThrowIfNull(configMap, ModelConfigurations.SERVICE); - String taskTypeStr = removeStringOrThrowIfNull(configMap, TaskType.NAME); - TaskType taskType = TaskType.fromString(taskTypeStr); - - return new UnparsedModel(modelId, taskType, service, configMap, secretsMap); - } - - private static String removeStringOrThrowIfNull(Map sourceMap, String fieldName) { - String value = (String) sourceMap.remove(fieldName); - if (value == null) { - throw new 
ElasticsearchStatusException("Missing required field [{}]", RestStatus.BAD_REQUEST, fieldName); - } - return value; - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java new file mode 100644 index 0000000000000..2cf9168f60986 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; +import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceInferenceRequest; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceInferenceRequestEntity; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import 
org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class HuggingFaceAction implements ExecutableAction { + private static final Logger logger = LogManager.getLogger(HuggingFaceAction.class); + + private final HuggingFaceAccount account; + private final String errorMessage; + private final RetryingHttpSender sender; + private final ResponseHandler responseHandler; + + public HuggingFaceAction( + Sender sender, + HuggingFaceModel model, + ServiceComponents serviceComponents, + ResponseHandler responseHandler, + String requestType + ) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + Objects.requireNonNull(requestType); + + this.responseHandler = Objects.requireNonNull(responseHandler); + + this.sender = new RetryingHttpSender( + Objects.requireNonNull(sender), + serviceComponents.throttlerManager(), + logger, + new RetrySettings(serviceComponents.settings()), + serviceComponents.threadPool() + ); + this.account = new HuggingFaceAccount(model.getUri(), model.getApiKey()); + this.errorMessage = format("Failed to send Hugging Face %s request to [%s]", requestType, model.getUri().toString()); + } + + @Override + public void execute(List input, ActionListener listener) { + try { + HuggingFaceInferenceRequest request = new HuggingFaceInferenceRequest(account, new HuggingFaceInferenceRequestEntity(input)); + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(request.createRequest(), responseHandler, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + 
listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java new file mode 100644 index 0000000000000..ba46519814b04 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceResponseHandler; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceElserResponseEntity; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceEmbeddingsResponseEntity; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; + +import java.util.Objects; + +/** + * Provides a way to construct an {@link ExecutableAction} using the visitor pattern based on the hugging face model type. 
+ */ +public class HuggingFaceActionCreator implements HuggingFaceActionVisitor { + private final Sender sender; + private final ServiceComponents serviceComponents; + + public HuggingFaceActionCreator(Sender sender, ServiceComponents serviceComponents) { + this.sender = Objects.requireNonNull(sender); + this.serviceComponents = Objects.requireNonNull(serviceComponents); + } + + @Override + public ExecutableAction create(HuggingFaceEmbeddingsModel model) { + var responseHandler = new HuggingFaceResponseHandler( + "hugging face text embeddings", + HuggingFaceEmbeddingsResponseEntity::fromResponse + ); + + return new HuggingFaceAction(sender, model, serviceComponents, responseHandler, "text embeddings"); + } + + @Override + public ExecutableAction create(HuggingFaceElserModel model) { + var responseHandler = new HuggingFaceResponseHandler("hugging face elser", HuggingFaceElserResponseEntity::fromResponse); + + return new HuggingFaceAction(sender, model, serviceComponents, responseHandler, "ELSER"); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java new file mode 100644 index 0000000000000..070be8db50ff0 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; + +public interface HuggingFaceActionVisitor { + ExecutableAction create(HuggingFaceEmbeddingsModel mode); + + ExecutableAction create(HuggingFaceElserModel mode); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java deleted file mode 100644 index fb648e2aabcfd..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.action.huggingface; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; -import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceClient; -import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; -import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestEntity; -import org.elasticsearch.xpack.inference.services.ServiceComponents; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; - -import java.util.List; - -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; -import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; - -public class HuggingFaceElserAction implements ExecutableAction { - - private final HuggingFaceAccount account; - private final HuggingFaceClient client; - private final String errorMessage; - - public HuggingFaceElserAction(Sender sender, HuggingFaceElserModel model, ServiceComponents serviceComponents) { - this.client = new HuggingFaceClient(sender, serviceComponents); - this.account = new HuggingFaceAccount(model.getServiceSettings().uri(), model.getSecretSettings().apiKey()); - this.errorMessage = format("Failed to send ELSER Hugging Face request to [%s]", model.getServiceSettings().uri().toString()); - } - - @Override - public void execute(List input, ActionListener listener) { - try { - HuggingFaceElserRequest request = new HuggingFaceElserRequest(account, new 
HuggingFaceElserRequestEntity(input)); - ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - - client.send(request, wrappedListener); - } catch (ElasticsearchException e) { - listener.onFailure(e); - } catch (Exception e) { - listener.onFailure(createInternalServerError(e, errorMessage)); - } - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java new file mode 100644 index 0000000000000..31d987118c28d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.core.Strings.format; + +public abstract class BaseResponseHandler implements ResponseHandler { + + public static final String SERVER_ERROR = "Received a server error status code"; + public static final String RATE_LIMIT = "Received a rate limit status code"; + public static final String AUTHENTICATION = "Received an authentication error status code"; + public static final String REDIRECTION = "Unhandled redirection"; + public static final String UNSUCCESSFUL = "Received an unsuccessful status code"; + + protected final String requestType; + private final CheckedFunction parseFunction; + private final Function errorParseFunction; + + public BaseResponseHandler( + String requestType, + CheckedFunction parseFunction, + Function errorParseFunction + ) { + this.requestType = Objects.requireNonNull(requestType); + this.parseFunction = Objects.requireNonNull(parseFunction); + this.errorParseFunction = Objects.requireNonNull(errorParseFunction); + } + + @Override + public InferenceServiceResults parseResult(HttpResult result) throws RetryException { + try { + return parseFunction.apply(result); + } catch (Exception e) { + throw new RetryException(true, e); + } + } + + @Override + public String getRequestType() { + return requestType; + } + + protected Exception buildError(String message, HttpRequestBase request, HttpResult result) { + var errorEntityMsg = errorParseFunction.apply(result); + var responseStatusCode = 
result.response().getStatusLine().getStatusCode(); + + if (errorEntityMsg == null) { + return new ElasticsearchStatusException( + format("%s for request [%s] status [%s]", message, request.getRequestLine(), responseStatusCode), + toRestStatus(responseStatusCode) + ); + } + + return new ElasticsearchStatusException( + format( + "%s for request [%s] status [%s]. Error message: [%s]", + message, + request.getRequestLine(), + responseStatusCode, + errorEntityMsg.getErrorMessage() + ), + toRestStatus(responseStatusCode) + ); + } + + static RestStatus toRestStatus(int statusCode) { + RestStatus code = null; + if (statusCode < 500) { + code = RestStatus.fromCode(statusCode); + } + + return code == null ? RestStatus.BAD_REQUEST : code; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java new file mode 100644 index 0000000000000..a4be7f15827fb --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +public interface ErrorMessage { + String getErrorMessage(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java index 3fe8225927f06..b4598717e7fc8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java @@ -18,7 +18,11 @@ public RetryException(boolean shouldRetry, Throwable cause) { this.shouldRetry = shouldRetry; } - public RetryException(boolean shouldRetry, String msg) { + /** + * This should really only be used for testing. Ideally a retry exception would be associated with + * an actual exception that can be provided back to the client in the event that retrying fails. 
+ */ + RetryException(boolean shouldRetry, String msg) { super(msg); this.shouldRetry = shouldRetry; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java index e91349cfbc2b7..70f2a9e0dde16 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java @@ -9,16 +9,19 @@ import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RetryableAction; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.IOException; +import java.net.UnknownHostException; import java.util.Objects; import java.util.concurrent.Executor; @@ -109,13 +112,22 @@ public boolean shouldRetry(Exception e) { /** * If the connection gets closed by the server or because of the connections time to live is exceeded we'll likely get a - * {@link org.apache.http.ConnectionClosedException} exception which is a child of IOException. For now, - * we'll consider all IOExceptions retryable because something failed while we were trying to send the request + * {@link org.apache.http.ConnectionClosedException} exception which is a child of IOException. 
+ * * @param e the Exception received while sending the request * @return a {@link RetryException} if this exception can be retried */ private Exception transformIfRetryable(Exception e) { var exceptionToReturn = e; + + if (e instanceof UnknownHostException) { + return new ElasticsearchStatusException( + format("Invalid host [%s], please check that the URL is correct.", request.getURI()), + RestStatus.BAD_REQUEST, + e + ); + } + if (e instanceof IOException) { exceptionToReturn = new RetryException(true, e); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java deleted file mode 100644 index f24a5529a4663..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.huggingface; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; -import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; -import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; -import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; -import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceElserResponseEntity; -import org.elasticsearch.xpack.inference.services.ServiceComponents; - -import java.io.IOException; - -public class HuggingFaceClient { - private static final Logger logger = LogManager.getLogger(HuggingFaceClient.class); - private static final ResponseHandler ELSER_RESPONSE_HANDLER = createElserHandler(); - - private final RetryingHttpSender sender; - - public HuggingFaceClient(Sender sender, ServiceComponents serviceComponents) { - this.sender = new RetryingHttpSender( - sender, - serviceComponents.throttlerManager(), - logger, - new RetrySettings(serviceComponents.settings()), - serviceComponents.threadPool() - ); - } - - public void send(HuggingFaceElserRequest request, ActionListener listener) throws IOException { - this.sender.send(request.createRequest(), ELSER_RESPONSE_HANDLER, listener); - } - - private static ResponseHandler createElserHandler() { - return new AlwaysRetryingResponseHandler("elser hugging face", HuggingFaceElserResponseEntity::fromResponse); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java new file mode 100644 index 0000000000000..eb7bc3d6a0b28 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceErrorResponseEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; + +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; + +public class HuggingFaceResponseHandler extends BaseResponseHandler { + + public HuggingFaceResponseHandler(String requestType, CheckedFunction parseFunction) { + super(requestType, parseFunction, HuggingFaceErrorResponseEntity::fromResponse); + } + + @Override + public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + throws RetryException { + checkForFailureStatusCode(request, result); + checkForEmptyBody(throttlerManager, logger, request, result); + } + + /** + * Validates the status code and throws a 
RetryException if it is not in the range [200, 300). + * + * The Hugging Face error codes are loosely defined here. + * @param request the http request + * @param result the http response and body + * @throws RetryException thrown if status code is {@code >= 300 or < 200} + */ + void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { + int statusCode = result.response().getStatusLine().getStatusCode(); + if (statusCode >= 200 && statusCode < 300) { + return; + } + + if (statusCode == 503 || statusCode == 502 || statusCode == 429) { + throw new RetryException(true, buildError(RATE_LIMIT, request, result)); + } else if (statusCode >= 500) { + throw new RetryException(false, buildError(SERVER_ERROR, request, result)); + } else if (statusCode == 401) { + throw new RetryException(false, buildError(AUTHENTICATION, request, result)); + } else if (statusCode >= 300 && statusCode < 400) { + throw new RetryException(false, buildError(REDIRECTION, request, result)); + } else { + throw new RetryException(false, buildError(UNSUCCESSFUL, request, result)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java index af809f1be97f9..e31bc3b2fd41e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.inference.external.request.openai.OpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.external.response.openai.OpenAiEmbeddingsResponseEntity; import org.elasticsearch.xpack.inference.services.ServiceComponents; -import org.elasticsearch.xpack.inference.services.openai.OpenAiResponseHandler; import java.io.IOException; @@ -43,6 
+42,6 @@ public void send(OpenAiEmbeddingsRequest request, ActionListener OpenAiEmbeddingsResponseEntity.fromResponse(result)); + return new OpenAiResponseHandler("openai text embedding", OpenAiEmbeddingsResponseEntity::fromResponse); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java new file mode 100644 index 0000000000000..7609b734db4f5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.response.openai.OpenAiErrorResponseEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; + +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; + +public class OpenAiResponseHandler extends BaseResponseHandler { + + public OpenAiResponseHandler(String requestType, CheckedFunction parseFunction) { + super(requestType, parseFunction, OpenAiErrorResponseEntity::fromResponse); + } + + @Override + public 
void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + throws RetryException { + checkForFailureStatusCode(request, result); + checkForEmptyBody(throttlerManager, logger, request, result); + } + + /** + * Validates the status code throws an RetryException if not in the range [200, 300). + * + * The OpenAI API error codes are documented here. + * @param request The http request + * @param result The http response and body + * @throws RetryException Throws if status code is {@code >= 300 or < 200 } + */ + void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { + int statusCode = result.response().getStatusLine().getStatusCode(); + if (statusCode >= 200 && statusCode < 300) { + return; + } + + // handle error codes + if (statusCode >= 500) { + throw new RetryException(false, buildError(SERVER_ERROR, request, result)); + } else if (statusCode == 429) { + throw new RetryException(false, buildError(RATE_LIMIT, request, result)); // TODO back off and retry + } else if (statusCode == 401) { + throw new RetryException(false, buildError(AUTHENTICATION, request, result)); + } else if (statusCode >= 300 && statusCode < 400) { + throw new RetryException(false, buildError(REDIRECTION, request, result)); + } else { + throw new RetryException(false, buildError(UNSUCCESSFUL, request, result)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java similarity index 86% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java rename to 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java index 563b0036bdb09..8b37439fc6c8a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java @@ -21,12 +21,12 @@ import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; -public class HuggingFaceElserRequest implements Request { +public class HuggingFaceInferenceRequest implements Request { private final HuggingFaceAccount account; - private final HuggingFaceElserRequestEntity entity; + private final HuggingFaceInferenceRequestEntity entity; - public HuggingFaceElserRequest(HuggingFaceAccount account, HuggingFaceElserRequestEntity entity) { + public HuggingFaceInferenceRequest(HuggingFaceAccount account, HuggingFaceInferenceRequestEntity entity) { this.account = Objects.requireNonNull(account); this.entity = Objects.requireNonNull(entity); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntity.java similarity index 85% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntity.java index 10ba249f9da7d..8656d3271a52e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntity.java @@ -14,11 +14,11 @@ import java.util.List; import java.util.Objects; -public record HuggingFaceElserRequestEntity(List inputs) implements ToXContentObject { +public record HuggingFaceInferenceRequestEntity(List inputs) implements ToXContentObject { private static final String INPUTS_FIELD = "inputs"; - public HuggingFaceElserRequestEntity { + public HuggingFaceInferenceRequestEntity { Objects.requireNonNull(inputs); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java new file mode 100644 index 0000000000000..4f4091873fba9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response; + +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.core.Strings.format; + +public class XContentUtils { + + public static void moveToFirstToken(XContentParser parser) throws IOException { + if (parser.currentToken() == null) { + parser.nextToken(); + } + } + + /** + * Iterates over the tokens until it finds a field name token with the text matching the field requested. 
+ * + * @param parser parser to move + * @param field the field name to find + * @param errorMsgTemplate a template message to populate an exception if the field cannot be found + * @throws IllegalStateException if the field cannot be found + */ + public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { + XContentParser.Token token; + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { + parser.nextToken(); + return; + } + } + + throw new IllegalStateException(format(errorMsgTemplate, field)); + } + + private XContentUtils() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index fab22dce889a5..7ef0d1cdbf3c7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -21,6 +21,8 @@ import java.util.Collections; import java.util.List; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; + public class HuggingFaceElserResponseEntity { /** @@ -58,9 +60,7 @@ public static SparseEmbeddingResults fromResponse(HttpResult response) throws IO var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { - if (jsonParser.currentToken() == null) { - jsonParser.nextToken(); - } + 
moveToFirstToken(jsonParser); List parsedEmbeddings = XContentParserUtils.parseList( jsonParser, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java new file mode 100644 index 0000000000000..fb7cbf5d49768 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java @@ -0,0 +1,161 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class HuggingFaceEmbeddingsResponseEntity { + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Hugging Face 
embeddings response"; + + /** + * Parse the response from hugging face. The known formats are an array of arrays and object with an {@code embeddings} field containing + * an array of arrays. + */ + public static TextEmbeddingResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + return parseArrayFormat(jsonParser); + } else if (token == XContentParser.Token.START_OBJECT) { + return parseObjectFormat(jsonParser); + } else { + throwUnknownToken(token, jsonParser); + } + } + + // This should never be reached. The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the hugging face response"); + } + + /** + * The response from hugging face could be formatted as [[0.1, ...], [0.1, ...]]. + * Each entry in the array will correspond to the entry within the inputs array within the request sent to hugging face. For example + * for a request like: + * + *
    +     *     
    +     *         {
    +     *             "inputs": ["hello this is my name", "I wish I was there!"]
    +     *         }
    +     *     
    +     * 
    + * + * The response would look like: + * + *
    +     *     
    +     *         [
    +     *              [
    +     *                  0.1,
    +     *                  0.234
    +     *              ],
    +     *              [
    +     *                  0.34,
    +     *                  0.56
    +     *              ]
    +     *         ]
    +     *     
    +     * 
    + * + * Example models with this response format: + * intfloat/e5-small-v2 + * intfloat/e5-base-v2 + * intfloat/multilingual-e5-base + * sentence-transformers/all-MiniLM-L6-v2 + * sentence-transformers/all-MiniLM-L12-v2 + */ + private static TextEmbeddingResults parseArrayFormat(XContentParser parser) throws IOException { + List embeddingList = XContentParserUtils.parseList( + parser, + HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry + ); + + return new TextEmbeddingResults(embeddingList); + } + + /** + * The response from hugging face could be formatted as {"embeddings": [[0.1, ...], [0.1, ...]}. + * Each entry in the array will correspond to the entry within the inputs array within the request sent to hugging face. For example + * for a request like: + * + *
    +     *     
    +     *         {
    +     *             "inputs": ["hello this is my name", "I wish I was there!"]
    +     *         }
    +     *     
    +     * 
    + * + * The response would look like: + * + *
    +     *     
    +     *         {
    +     *             "embeddings": [
    +     *                  [
    +     *                      0.1,
    +     *                      0.234
    +     *                  ],
    +     *                  [
    +     *                      0.34,
    +     *                      0.56
    +     *                  ]
    +     *             ]
    +     *         }
    +     *     
    +     * 
    + * + * Example models with this response format: + * intfloat/multilingual-e5-small + * sentence-transformers/all-mpnet-base-v2 + */ + private static TextEmbeddingResults parseObjectFormat(XContentParser parser) throws IOException { + positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); + + List embeddingList = XContentParserUtils.parseList( + parser, + HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry + ); + + return new TextEmbeddingResults(embeddingList); + } + + private static TextEmbeddingResults.Embedding parseEmbeddingEntry(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + + List embeddingValues = XContentParserUtils.parseList(parser, HuggingFaceEmbeddingsResponseEntity::parseEmbeddingList); + return new TextEmbeddingResults.Embedding(embeddingValues); + } + + private static float parseEmbeddingList(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); + return parser.floatValue(); + } + + private HuggingFaceEmbeddingsResponseEntity() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java new file mode 100644 index 0000000000000..faeb7c6ac4fa9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorMessage; + +public record HuggingFaceErrorResponseEntity(String message) implements ErrorMessage { + /** + * An example error response for invalid auth would look like + * + * { + * "error": "A valid user token is required" + * } + * + * + * + * @param response The error response + * @return An error entity if the response is JSON with the above structure + * or null if the response does not contain the error field + */ + public static HuggingFaceErrorResponseEntity fromResponse(HttpResult response) { + try ( + XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY, response.body()) + ) { + var responseMap = jsonParser.map(); + var error = (String) responseMap.get("error"); + if (error != null) { + return new HuggingFaceErrorResponseEntity(error); + } + } catch (Exception e) { + // swallow the error + } + + return null; + } + + @Override + public String getErrorMessage() { + return message; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java index c301ab2194415..b723cb0f86dea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java 
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java @@ -19,9 +19,11 @@ import java.io.IOException; import java.util.List; -import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; public class OpenAiEmbeddingsResponseEntity { + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in OpenAI embeddings response"; /** * Parses the OpenAI json response. @@ -70,14 +72,12 @@ public static TextEmbeddingResults fromResponse(HttpResult response) throws IOEx var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { - if (jsonParser.currentToken() == null) { - jsonParser.nextToken(); - } + moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); - positionParserAtTokenAfterField(jsonParser, "data"); + positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE); List embeddingList = XContentParserUtils.parseList( jsonParser, @@ -88,28 +88,10 @@ public static TextEmbeddingResults fromResponse(HttpResult response) throws IOEx } } - /** - * Iterates over the tokens until it finds a field name token with the text matching the field requested. 
- * - * @throws IllegalStateException if the field cannot be found - */ - private static void positionParserAtTokenAfterField(XContentParser parser, String field) throws IOException { - XContentParser.Token token; - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { - parser.nextToken(); - return; - } - } - - throw new IllegalStateException(format("Failed to find required field [%s] in OpenAI embeddings response", field)); - } - private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - positionParserAtTokenAfterField(parser, "embedding"); + positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); List embeddingValues = XContentParserUtils.parseList(parser, OpenAiEmbeddingsResponseEntity::parseEmbeddingList); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java index 10f42a8ec7d19..a364be29ada33 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java @@ -12,11 +12,11 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorMessage; -import java.io.IOException; import java.util.Map; -public class OpenAiErrorResponseEntity { +public class 
OpenAiErrorResponseEntity implements ErrorMessage { private final String errorMessage; @@ -60,7 +60,7 @@ public static OpenAiErrorResponseEntity fromResponse(HttpResult response) { return new OpenAiErrorResponseEntity(message); } } - } catch (IOException e) { + } catch (Exception e) { // swallow the error } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 05c664f7ceeea..aa2e0a81a59b2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -42,7 +42,7 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.inference.InferenceIndex; import org.elasticsearch.xpack.inference.InferenceSecretsIndex; -import org.elasticsearch.xpack.inference.services.MapParsingUtils; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.ArrayList; @@ -73,9 +73,9 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) if (modelConfigMap.config() == null) { throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); } - String modelId = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); - String service = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); - String taskTypeStr = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); + String modelId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); + String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); + String taskTypeStr = 
ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); TaskType taskType = TaskType.fromString(taskTypeStr); return new UnparsedModel(modelId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java new file mode 100644 index 0000000000000..bb45e8fd684a6 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; + +public abstract class SenderService implements InferenceService { + private final SetOnce factory; + private final SetOnce serviceComponents; + private final AtomicReference sender = new AtomicReference<>(); + + public SenderService(SetOnce factory, SetOnce serviceComponents) { + this.factory = Objects.requireNonNull(factory); + this.serviceComponents = Objects.requireNonNull(serviceComponents); + } + + protected Sender 
getSender() { + return sender.get(); + } + + protected ServiceComponents getServiceComponents() { + return serviceComponents.get(); + } + + @Override + public void infer(Model model, List input, Map taskSettings, ActionListener listener) { + init(); + + doInfer(model, input, taskSettings, listener); + } + + protected abstract void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ); + + @Override + public void start(Model model, ActionListener listener) { + init(); + + doStart(model, listener); + } + + protected void doStart(Model model, ActionListener listener) { + listener.onResponse(true); + } + + private void init() { + sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); + sender.get().start(); + } + + @Override + public void close() throws IOException { + IOUtils.closeWhileHandlingException(sender.get()); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java similarity index 79% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 45bbddc92f135..597cd172ff661 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Strings; +import org.elasticsearch.inference.Model; import org.elasticsearch.rest.RestStatus; import java.net.URI; @@ -20,7 +21,7 @@ import static org.elasticsearch.core.Strings.format; -public class 
MapParsingUtils { +public class ServiceUtils { /** * Remove the object from the map and cast to the expected type. * If the object cannot be cast to type an ElasticsearchStatusException @@ -71,7 +72,7 @@ public static String removeStringOrThrowIfNull(Map sourceMap, St public static void throwIfNotEmptyMap(Map settingsMap, String serviceName) { if (settingsMap != null && settingsMap.isEmpty() == false) { - throw MapParsingUtils.unknownSettingsError(settingsMap, serviceName); + throw ServiceUtils.unknownSettingsError(settingsMap, serviceName); } } @@ -102,7 +103,7 @@ public static URI convertToUri(String url, String settingName, String settingSco try { return createUri(url); } catch (IllegalArgumentException ignored) { - validationException.addValidationError(MapParsingUtils.invalidUrlErrorMsg(url, settingName, settingScope)); + validationException.addValidationError(ServiceUtils.invalidUrlErrorMsg(url, settingName, settingScope)); return null; } } @@ -138,12 +139,12 @@ public static String extractRequiredString( String scope, ValidationException validationException ) { - String requiredField = MapParsingUtils.removeAsType(map, settingName, String.class); + String requiredField = ServiceUtils.removeAsType(map, settingName, String.class); if (requiredField == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(settingName, scope)); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(settingName, scope)); } else if (requiredField.isEmpty()) { - validationException.addValidationError(MapParsingUtils.mustBeNonEmptyString(settingName, scope)); + validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } if (validationException.validationErrors().isEmpty() == false) { @@ -159,10 +160,10 @@ public static String extractOptionalString( String scope, ValidationException validationException ) { - String optionalField = MapParsingUtils.removeAsType(map, settingName, String.class); 
+ String optionalField = ServiceUtils.removeAsType(map, settingName, String.class); if (optionalField != null && optionalField.isEmpty()) { - validationException.addValidationError(MapParsingUtils.mustBeNonEmptyString(settingName, scope)); + validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } if (validationException.validationErrors().isEmpty() == false) { @@ -171,4 +172,19 @@ public static String extractOptionalString( return optionalField; } + + public static String parsePersistedConfigErrorMsg(String modelId, String serviceName) { + return format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, serviceName); + } + + public static ElasticsearchStatusException createInvalidModelException(Model model) { + return new ElasticsearchStatusException( + format( + "The internal model was invalid, please delete the service [%s] with id [%s] and add it again.", + model.getConfigurations().getService(), + model.getConfigurations().getModelId() + ), + RestStatus.INTERNAL_SERVER_ERROR + ); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index 048920356aca0..7becc57999fb6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -32,8 +32,8 @@ import java.util.Set; import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap; +import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; public class ElserMlNodeService implements InferenceService { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java index d1f27302f85f1..2ea7b080d059d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java @@ -15,7 +15,7 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.services.MapParsingUtils; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.Map; @@ -43,26 +43,24 @@ public class ElserMlNodeServiceSettings implements ServiceSettings { */ public static ElserMlNodeServiceSettings.Builder fromMap(Map map) { ValidationException validationException = new ValidationException(); - Integer numAllocations = MapParsingUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); - Integer numThreads = MapParsingUtils.removeAsType(map, NUM_THREADS, Integer.class); + Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); + Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); if (numAllocations == null) { validationException.addValidationError( - MapParsingUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) + ServiceUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) ); } else if 
(numAllocations < 1) { validationException.addValidationError(mustBeAPositiveNumberError(NUM_ALLOCATIONS, numAllocations)); } if (numThreads == null) { - validationException.addValidationError( - MapParsingUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS) - ); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS)); } else if (numThreads < 1) { validationException.addValidationError(mustBeAPositiveNumberError(NUM_THREADS, numThreads)); } - String version = MapParsingUtils.removeAsType(map, MODEL_VERSION, String.class); + String version = ServiceUtils.removeAsType(map, MODEL_VERSION, String.class); if (version != null && ElserMlNodeService.VALID_ELSER_MODELS.contains(version) == false) { validationException.addValidationError("unknown ELSER model version [" + version + "]"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java new file mode 100644 index 0000000000000..a7dc26b8472d1 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.ServiceComponents; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; + +public abstract class HuggingFaceBaseService extends SenderService { + + public HuggingFaceBaseService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + public HuggingFaceModel parseRequestConfig( + String modelId, + TaskType taskType, + Map config, + Set platformArchitectures + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + + var model = createModel( + modelId, + taskType, + serviceSettingsMap, + serviceSettingsMap, + TaskType.unsupportedTaskTypeErrorMsg(taskType, name()) + ); + + throwIfNotEmptyMap(config, name()); + throwIfNotEmptyMap(serviceSettingsMap, name()); + + return model; + } + + @Override + public HuggingFaceModel parsePersistedConfigWithSecrets( + String 
modelId, + TaskType taskType, + Map config, + Map secrets + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); + + return createModel(modelId, taskType, serviceSettingsMap, secretSettingsMap, parsePersistedConfigErrorMsg(modelId, name())); + } + + @Override + public HuggingFaceModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + + return createModel(modelId, taskType, serviceSettingsMap, null, parsePersistedConfigErrorMsg(modelId, name())); + } + + protected abstract HuggingFaceModel createModel( + String modelId, + TaskType taskType, + Map serviceSettings, + Map secretSettings, + String failureMessage + ); + + @Override + public void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ) { + if (model instanceof HuggingFaceModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } + + var huggingFaceModel = (HuggingFaceModel) model; + var actionCreator = new HuggingFaceActionCreator(getSender(), getServiceComponents()); + + var action = huggingFaceModel.accept(actionCreator); + action.execute(input, listener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java new file mode 100644 index 0000000000000..d672afa99ea9d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; + +import java.net.URI; + +public abstract class HuggingFaceModel extends Model { + public HuggingFaceModel(ModelConfigurations configurations, ModelSecrets secrets) { + super(configurations, secrets); + } + + public abstract ExecutableAction accept(HuggingFaceActionVisitor creator); + + public abstract URI getUri(); + + public abstract SecureString getApiKey(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java new file mode 100644 index 0000000000000..99e39f6f55912 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; + +import java.util.Map; + +public class HuggingFaceService extends HuggingFaceBaseService { + public static final String NAME = "hugging_face"; + + public HuggingFaceService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + protected HuggingFaceModel createModel( + String modelId, + TaskType taskType, + Map serviceSettings, + @Nullable Map secretSettings, + String failureMessage + ) { + return switch (taskType) { + case TEXT_EMBEDDING -> new HuggingFaceEmbeddingsModel(modelId, taskType, NAME, serviceSettings, secretSettings); + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); + }; + } + + @Override + public String name() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_HF_SERVICE_ADDED; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java new file mode 100644 index 0000000000000..dc98990b1ef8c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; + +public record HuggingFaceServiceSettings(URI uri) implements ServiceSettings { + public static final String NAME = "hugging_face_service_settings"; + + static final String URL = "url"; + + public static HuggingFaceServiceSettings fromMap(Map map) { + return new HuggingFaceServiceSettings(extractUri(map, URL)); + } + + public static URI extractUri(Map map, String fieldName) { + ValidationException validationException = new ValidationException(); + + String parsedUrl = extractRequiredString(map, fieldName, 
ModelConfigurations.SERVICE_SETTINGS, validationException); + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + URI uri = convertToUri(parsedUrl, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return uri; + } + + public HuggingFaceServiceSettings { + Objects.requireNonNull(uri); + } + + public HuggingFaceServiceSettings(String url) { + this(createUri(url)); + } + + public HuggingFaceServiceSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(URL, uri.toString()); + builder.endObject(); + + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_HF_SERVICE_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uri.toString()); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java index 6dc8ec280dc9d..24160387179ff 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java @@ -7,18 +7,41 @@ package org.elasticsearch.xpack.inference.services.huggingface.elser; -import org.elasticsearch.inference.Model; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import 
org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; -public class HuggingFaceElserModel extends Model { +import java.net.URI; +import java.util.Map; + +public class HuggingFaceElserModel extends HuggingFaceModel { public HuggingFaceElserModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + @Nullable Map secrets + ) { + this( + modelId, + taskType, + service, + HuggingFaceElserServiceSettings.fromMap(serviceSettings), + HuggingFaceElserSecretSettings.fromMap(secrets) + ); + } + + HuggingFaceElserModel( String modelId, TaskType taskType, String service, HuggingFaceElserServiceSettings serviceSettings, - HuggingFaceElserSecretSettings secretSettings + @Nullable HuggingFaceElserSecretSettings secretSettings ) { super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secretSettings)); } @@ -32,4 +55,19 @@ public HuggingFaceElserServiceSettings getServiceSettings() { public HuggingFaceElserSecretSettings getSecretSettings() { return (HuggingFaceElserSecretSettings) super.getSecretSettings(); } + + @Override + public ExecutableAction accept(HuggingFaceActionVisitor creator) { + return creator.create(this); + } + + @Override + public URI getUri() { + return getServiceSettings().uri(); + } + + @Override + public SecureString getApiKey() { + return getSecretSettings().apiKey(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java 
index f2df48366f786..bf024e97f1e0a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,14 +22,18 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredSecureString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; public record HuggingFaceElserSecretSettings(SecureString apiKey) implements SecretSettings { public static final String NAME = "hugging_face_elser_secret_settings"; static final String API_KEY = "api_key"; - public static HuggingFaceElserSecretSettings fromMap(Map map) { + public static HuggingFaceElserSecretSettings fromMap(@Nullable Map map) { + if (map == null) { + return null; + } + ValidationException validationException = new ValidationException(); SecureString secureApiToken = extractRequiredSecureString(map, API_KEY, ModelSecrets.SECRET_SETTINGS, validationException); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index 8c978112c4ec3..c06b6a62db29a 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -8,44 +8,24 @@ package org.elasticsearch.xpack.inference.services.huggingface.elser; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.core.IOUtils; -import org.elasticsearch.inference.InferenceService; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceElserAction; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceBaseService; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; -import java.io.IOException; -import java.util.List; import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap; - -public class HuggingFaceElserService implements InferenceService { 
+public class HuggingFaceElserService extends HuggingFaceBaseService { public static final String NAME = "hugging_face_elser"; - private final SetOnce factory; - private final SetOnce serviceComponents; - private final AtomicReference sender = new AtomicReference<>(); - public HuggingFaceElserService(SetOnce factory, SetOnce serviceComponents) { - this.factory = Objects.requireNonNull(factory); - this.serviceComponents = Objects.requireNonNull(serviceComponents); + super(factory, serviceComponents); } @Override @@ -54,86 +34,17 @@ public String name() { } @Override - public HuggingFaceElserModel parseRequestConfig( - String modelId, - TaskType taskType, - Map config, - Set platformArchitectures - ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - - HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); - HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(serviceSettingsMap); - - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - - return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); - } - - @Override - public HuggingFaceElserModel parsePersistedConfigWithSecrets( + protected HuggingFaceModel createModel( String modelId, TaskType taskType, - Map config, - Map secrets + Map serviceSettings, + @Nullable Map secretSettings, + String failureMessage ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); - - HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); - HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(secretSettingsMap); - - return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, 
secretSettings); - } - - @Override - public HuggingFaceElserModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); - - return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, null); - } - - @Override - public void infer(Model model, List input, Map taskSettings, ActionListener listener) { - if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { - listener.onFailure( - new ElasticsearchStatusException( - TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME), - RestStatus.BAD_REQUEST - ) - ); - return; - } - - if (model instanceof HuggingFaceElserModel == false) { - listener.onFailure(new ElasticsearchException("The internal model was invalid")); - return; - } - - init(); - - HuggingFaceElserModel huggingFaceElserModel = (HuggingFaceElserModel) model; - HuggingFaceElserAction action = new HuggingFaceElserAction(sender.get(), huggingFaceElserModel, serviceComponents.get()); - - action.execute(input, listener); - } - - @Override - public void start(Model model, ActionListener listener) { - init(); - listener.onResponse(true); - } - - @Override - public void close() throws IOException { - IOUtils.closeWhileHandlingException(sender.get()); - } - - private void init() { - sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); - sender.get().start(); + return switch (taskType) { + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); + }; } @Override diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index 4b8213909f66b..dd185c4ca8385 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -9,10 +9,8 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,9 +19,8 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.convertToUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.createUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSettings { public static final String NAME = "hugging_face_elser_service_settings"; @@ -31,20 +28,7 @@ public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSetting static final String URL = "url"; public static HuggingFaceElserServiceSettings fromMap(Map map) { - ValidationException validationException = new ValidationException(); - - String 
parsedUrl = extractRequiredString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - URI uri = convertToUri(parsedUrl, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - return new HuggingFaceElserServiceSettings(uri); + return new HuggingFaceElserServiceSettings(extractUri(map, URL)); } public HuggingFaceElserServiceSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java new file mode 100644 index 0000000000000..1f2e545a06901 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.embeddings; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; +import java.util.Map; + +public class HuggingFaceEmbeddingsModel extends HuggingFaceModel { + public HuggingFaceEmbeddingsModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + @Nullable Map secrets + ) { + this(modelId, taskType, service, HuggingFaceServiceSettings.fromMap(serviceSettings), DefaultSecretSettings.fromMap(secrets)); + } + + // Should only be used directly for testing + HuggingFaceEmbeddingsModel( + String modelId, + TaskType taskType, + String service, + HuggingFaceServiceSettings serviceSettings, + @Nullable DefaultSecretSettings secrets + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secrets)); + } + + @Override + public HuggingFaceServiceSettings getServiceSettings() { + return (HuggingFaceServiceSettings) super.getServiceSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + @Override + public URI getUri() { + return getServiceSettings().uri(); + } + + @Override + public SecureString getApiKey() { + return getSecretSettings().apiKey(); + } + + @Override + public ExecutableAction 
accept(HuggingFaceActionVisitor creator) { + return creator.create(this); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java deleted file mode 100644 index b5b6b5df99862..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.services.openai; - -import org.apache.http.RequestLine; -import org.apache.http.client.methods.HttpRequestBase; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; -import org.elasticsearch.xpack.inference.external.http.retry.RetryException; -import org.elasticsearch.xpack.inference.external.response.openai.OpenAiErrorResponseEntity; -import org.elasticsearch.xpack.inference.logging.ThrottlerManager; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; - -public class OpenAiResponseHandler implements ResponseHandler { - - protected final String requestType; - private final CheckedFunction parseFunction; - - public OpenAiResponseHandler(String requestType, CheckedFunction parseFunction) { - this.requestType = Objects.requireNonNull(requestType); - 
this.parseFunction = Objects.requireNonNull(parseFunction); - } - - @Override - public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) - throws RetryException { - checkForFailureStatusCode(request, result); - checkForEmptyBody(throttlerManager, logger, request, result); - } - - @Override - public InferenceServiceResults parseResult(HttpResult result) throws RetryException { - try { - return parseFunction.apply(result); - } catch (Exception e) { - throw new RetryException(true, e); - } - } - - @Override - public String getRequestType() { - return requestType; - } - - /** - * Validates the status code throws an RetryException if not in the range [200, 300). - * - * The OpenAI API error codes are document at https://platform.openai.com/docs/guides/error-codes/api-errors - * @param request The http request - * @param result The http response and body - * @throws RetryException Throws if status code is {@code >= 300 or < 200 } - */ - static void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { - int statusCode = result.response().getStatusLine().getStatusCode(); - if (statusCode >= 200 && statusCode < 300) { - return; - } - - // handle error codes - if (statusCode >= 500) { - String errorMsg = buildErrorMessageWithResponse( - "Received a server error status code for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); - } else if (statusCode == 429) { - String errorMsg = buildErrorMessageWithResponse( - "Received a rate limit status code for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); // TODO back off and retry - } else if (statusCode == 401) { - String errorMsg = buildErrorMessageWithResponse( - "Received a authentication error status code for request [%s] status [%s]", - request.getRequestLine(), - 
statusCode, - result - ); - throw new RetryException(false, errorMsg); - } else if (statusCode >= 300 && statusCode < 400) { - String errorMsg = buildErrorMessageWithResponse( - "Unhandled redirection for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); - } else { - String errorMsg = buildErrorMessageWithResponse( - "Received an unsuccessful status code for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); - } - } - - static String buildErrorMessageWithResponse(String baseMessage, RequestLine requestLine, int statusCode, HttpResult response) { - var errorEntity = OpenAiErrorResponseEntity.fromResponse(response); - - if (errorEntity == null) { - return format(baseMessage, requestLine, statusCode); - } else { - var base = format(baseMessage, requestLine, statusCode); - return base + ". Error message: [" + errorEntity.getErrorMessage() + "]"; - } - - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 0a7ae147d13d1..acf7b84bfccb1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -12,8 +12,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.core.IOUtils; -import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; @@ -22,31 +21,24 @@ import 
org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; -import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; -public class OpenAiService implements InferenceService { +public class OpenAiService extends SenderService { public static final String NAME = "openai"; - private final SetOnce factory; - private final SetOnce serviceComponents; - private final AtomicReference sender = new AtomicReference<>(); - public OpenAiService(SetOnce factory, SetOnce serviceComponents) { - this.factory = Objects.requireNonNull(factory); - this.serviceComponents = Objects.requireNonNull(serviceComponents); + super(factory, serviceComponents); } @Override @@ -80,12 +72,12 @@ public OpenAiModel parseRequestConfig( return model; } - private OpenAiModel createModel( + private static OpenAiModel 
createModel( String modelId, TaskType taskType, Map serviceSettings, Map taskSettings, - Map secretSettings, + @Nullable Map secretSettings, String failureMessage ) { return switch (taskType) { @@ -105,22 +97,14 @@ public OpenAiModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); - OpenAiModel model = createModel( + return createModel( modelId, taskType, serviceSettingsMap, taskSettingsMap, secretSettingsMap, - format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, NAME) + parsePersistedConfigErrorMsg(modelId, NAME) ); - - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(secrets, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); - throwIfNotEmptyMap(secretSettingsMap, NAME); - - return model; } @Override @@ -128,63 +112,28 @@ public OpenAiModel parsePersistedConfig(String modelId, TaskType taskType, Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); - OpenAiModel model = createModel( - modelId, - taskType, - serviceSettingsMap, - taskSettingsMap, - null, - format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, NAME) - ); - - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); - - return model; + return createModel(modelId, taskType, serviceSettingsMap, taskSettingsMap, null, parsePersistedConfigErrorMsg(modelId, NAME)); } @Override - public void infer(Model model, List input, Map taskSettings, ActionListener listener) { - init(); - + public void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener 
listener + ) { if (model instanceof OpenAiModel == false) { - listener.onFailure( - new ElasticsearchStatusException( - format( - "The internal model was invalid, please delete the service [%s] with id [%s] and add it again.", - model.getConfigurations().getService(), - model.getConfigurations().getModelId() - ), - RestStatus.INTERNAL_SERVER_ERROR - ) - ); + listener.onFailure(createInvalidModelException(model)); return; } OpenAiModel openAiModel = (OpenAiModel) model; - var actionCreator = new OpenAiActionCreator(sender.get(), serviceComponents.get()); + var actionCreator = new OpenAiActionCreator(getSender(), getServiceComponents()); var action = openAiModel.accept(actionCreator, taskSettings); action.execute(input, listener); } - @Override - public void start(Model model, ActionListener listener) { - init(); - listener.onResponse(true); - } - - @Override - public void close() throws IOException { - IOUtils.closeWhileHandlingException(sender.get()); - } - - private void init() { - sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); - sender.get().start(); - } - @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_OPENAI_ADDED; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java index adb947b01691e..6c7ff17e352d5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java @@ -21,9 +21,9 @@ import java.net.URI; import java.util.Map; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.convertToUri; -import static 
org.elasticsearch.xpack.inference.services.MapParsingUtils.createUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; /** * Defines the base settings for interacting with OpenAI. diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 210b84d8ca31e..5e2c352d88a01 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.openai.embeddings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -25,7 +26,7 @@ public OpenAiEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, - Map secrets + @Nullable Map secrets ) { this( modelId, @@ -44,7 +45,7 @@ public OpenAiEmbeddingsModel( String service, OpenAiServiceSettings serviceSettings, OpenAiEmbeddingsTaskSettings taskSettings, - DefaultSecretSettings secrets + @Nullable DefaultSecretSettings secrets ) { super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 4933717192266..7df57516ad632 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -12,7 +12,7 @@ import java.util.Map; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.MODEL; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.USER; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java index 05781c03f9cb0..45a9ce1cabbc3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; /** * Defines the task 
settings for the openai service. diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java index 3ad29d56a88be..2689634d75d98 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,7 +22,7 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredSecureString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; /** * Contains secret settings that are common to all services. 
@@ -32,7 +33,11 @@ public record DefaultSecretSettings(SecureString apiKey) implements SecretSettin static final String API_KEY = "api_key"; - public static DefaultSecretSettings fromMap(Map map) { + public static DefaultSecretSettings fromMap(@Nullable Map map) { + if (map == null) { + return null; + } + ValidationException validationException = new ValidationException(); SecureString secureApiToken = extractRequiredSecureString(map, API_KEY, ModelSecrets.SECRET_SETTINGS, validationException); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java similarity index 50% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java index 65d665b71f8ee..c66f967de508f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.huggingface; +package org.elasticsearch.xpack.inference.external.action.huggingface; import org.apache.http.HttpHeaders; import org.elasticsearch.ElasticsearchException; @@ -20,9 +20,12 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModelTests; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModelTests; import org.junit.After; import org.junit.Before; @@ -37,19 +40,16 @@ import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; -import static org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestTests.createRequest; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; -import static org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests.buildExpectation; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static 
org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; -public class HuggingFaceClientTests extends ESTestCase { +public class HuggingFaceActionCreatorTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); private final MockWebServer webServer = new MockWebServer(); private ThreadPool threadPool; @@ -59,7 +59,7 @@ public class HuggingFaceClientTests extends ESTestCase { public void init() throws Exception { webServer.start(); threadPool = createThreadPool(inferenceUtilityPool()); - clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mockThrottlerManager()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); } @After @@ -70,7 +70,7 @@ public void shutdown() throws IOException { } @SuppressWarnings("unchecked") - public void testSend_SuccessfulResponse() throws IOException, URISyntaxException { + public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); try (var sender = senderFactory.createSender("test_service")) { @@ -85,19 +85,22 @@ public void testSend_SuccessfulResponse() throws IOException, URISyntaxException """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient( - sender, - new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY) - ); + var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator(sender, createWithEmptySettings(threadPool)); + var action = actionCreator.create(model); PlainActionFuture listener = 
new PlainActionFuture<>(); - huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + action.execute(List.of("abc"), listener); var result = listener.actionGet(TIMEOUT); assertThat( result.asMap(), - is(buildExpectation(List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)))) + is( + SparseEmbeddingResultsTests.buildExpectation( + List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)) + ) + ) ); assertThat(webServer.requests(), hasSize(1)); @@ -117,7 +120,7 @@ public void testSend_SuccessfulResponse() throws IOException, URISyntaxException } @SuppressWarnings("unchecked") - public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyntaxException { + public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOException, URISyntaxException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); try (var sender = senderFactory.createSender("test_service")) { @@ -140,7 +143,8 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient( + var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator( sender, new ServiceComponents( threadPool, @@ -149,9 +153,10 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) ) ); + var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + action.execute(List.of("abc"), listener); var thrownException = 
expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -175,20 +180,108 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn } } - public void testSend_ThrowsException() throws URISyntaxException, IOException { - var sender = mock(Sender.class); - doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + @SuppressWarnings("unchecked") + public void testExecute_ReturnsSuccessfulResponse_ForEmbeddingsAction() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123, + 0.123 + ] + ] + { + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator(sender, createWithEmptySettings(threadPool)); + var action = actionCreator.create(model); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient( - sender, - new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY) - ); - PlainActionFuture listener = new PlainActionFuture<>(); + assertThat(result.asMap(), is(TextEmbeddingResultsTests.buildExpectation(List.of(List.of(-0.0123F, 0.123F))))); - huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + 
assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is("failed")); + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); + } } + @SuppressWarnings("unchecked") + public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws IOException, URISyntaxException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + // this will fail because the only valid formats are {"embeddings": [[...]]} or [[...]] + String responseJson = """ + [ + { + "embeddings": [ + [ + -0.0123, + 0.123 + ] + ] + { + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator( + sender, + new ServiceComponents( + threadPool, + mockThrottlerManager(), + // timeout as zero for no retries + buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) + ) + ); + var action = actionCreator.create(model); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + + assertThat(webServer.requests(), 
hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java new file mode 100644 index 0000000000000..7b1301a75a1fd --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModelTests.createModel; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class HuggingFaceActionTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private static final String URl = "http://localhost:12345"; + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { + 
var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + + var action = createAction(URl, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var action = createAction(URl, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Hugging Face test action request to [%s]", URl))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); + + var action = createAction(URl, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Hugging Face test action request to [%s]", URl))); + } + + private HuggingFaceAction createAction(String url, Sender sender) { + var model = createModel(url, "secret"); + + return new HuggingFaceAction( + sender, + model, + new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY), + new 
AlwaysRetryingResponseHandler("test", (result) -> null), + "test action" + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java deleted file mode 100644 index 6e1c2d528c467..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.action.huggingface; - -import org.apache.http.HttpHeaders; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.http.MockResponse; -import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.inference.external.http.HttpClientManager; -import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.logging.ThrottlerManager; 
-import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; -import org.elasticsearch.xpack.inference.services.ServiceComponents; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; -import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; -import static org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests.buildExpectation; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; - -public class HuggingFaceElserActionTests extends ESTestCase { - private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); - private final MockWebServer webServer = new MockWebServer(); - private ThreadPool threadPool; - private HttpClientManager clientManager; - - @Before - public void init() throws Exception { - webServer.start(); - threadPool = createThreadPool(inferenceUtilityPool()); - clientManager = HttpClientManager.create(Settings.EMPTY, 
threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); - } - - @After - public void shutdown() throws IOException { - clientManager.close(); - terminate(threadPool); - webServer.close(); - } - - @SuppressWarnings("unchecked") - public void testExecute_ReturnsSuccessfulResponse() throws IOException { - var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); - - try (var sender = senderFactory.createSender("test_service")) { - sender.start(); - - String responseJson = """ - [ - { - ".": 0.133155956864357 - } - ] - """; - webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var result = listener.actionGet(TIMEOUT); - - assertThat( - result.asMap(), - is(buildExpectation(List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)))) - ); - - assertThat(webServer.requests(), hasSize(1)); - assertNull(webServer.requests().get(0).getUri().getQuery()); - assertThat( - webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), - equalTo(XContentType.JSON.mediaTypeWithoutParameters()) - ); - assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); - - var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap.size(), is(1)); - assertThat(requestMap.get("inputs"), instanceOf(List.class)); - var inputList = (List) requestMap.get("inputs"); - assertThat(inputList, contains("abc")); - } - } - - public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOException { - try (var sender = mock(Sender.class)) { - var thrownException = expectThrows(IllegalArgumentException.class, () -> createAction("^^", sender)); - assertThat(thrownException.getMessage(), is("unable to parse url 
[^^]")); - } - } - - public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { - var sender = mock(Sender.class); - doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat(thrownException.getMessage(), is("failed")); - } - - public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { - var sender = mock(Sender.class); - - doAnswer(invocation -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onFailure(new IllegalStateException("failed")); - - return Void.TYPE; - }).when(sender).send(any(), any()); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat(thrownException.getMessage(), is(format("Failed to send ELSER Hugging Face request to [%s]", getUrl(webServer)))); - } - - public void testExecute_ThrowsException() { - var sender = mock(Sender.class); - doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat(thrownException.getMessage(), is(format("Failed to send ELSER Hugging Face request to [%s]", getUrl(webServer)))); - } - - private HuggingFaceElserAction createAction(String url, Sender sender) { - var model = new 
HuggingFaceElserModel( - "id", - TaskType.SPARSE_EMBEDDING, - "service", - new HuggingFaceElserServiceSettings(url), - new HuggingFaceElserSecretSettings(new SecureString("secret".toCharArray())) - ); - - return new HuggingFaceElserAction(sender, model, new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java new file mode 100644 index 0000000000000..b7095979b0fa5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler.toRestStatus; +import static org.hamcrest.core.Is.is; + +public class BaseResponseHandlerTests extends ESTestCase { + public void testToRestStatus_ReturnsBadRequest_WhenStatusIs500() { + assertThat(toRestStatus(500), is(RestStatus.BAD_REQUEST)); + } + + public void testToRestStatus_ReturnsBadRequest_WhenStatusIs501() { + assertThat(toRestStatus(501), is(RestStatus.BAD_REQUEST)); + } + + public void testToRestStatus_ReturnsStatusCodeValue_WhenStatusIs200() { + assertThat(toRestStatus(200), is(RestStatus.OK)); + } + + public void testToRestStatus_ReturnsBadRequest_WhenStatusIsUnknown() { + assertThat(toRestStatus(1000), is(RestStatus.BAD_REQUEST)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java new file mode 100644 index 0000000000000..9bebddc9ebb87 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class HuggingFaceResponseHandlerTests extends ESTestCase { + + public void testCheckForFailureStatusCode() { + var statusLine = mock(StatusLine.class); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var httpRequest = mock(HttpRequestBase.class); + + var httpResult = new HttpResult(httpResponse, new byte[] {}); + + var handler = new HuggingFaceResponseHandler("", result -> null); + + // 200 ok + when(statusLine.getStatusCode()).thenReturn(200); + handler.checkForFailureStatusCode(httpRequest, httpResult); + // 503 + when(statusLine.getStatusCode()).thenReturn(503); + var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [503]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 502 + when(statusLine.getStatusCode()).thenReturn(502); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + 
retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [502]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 429 + when(statusLine.getStatusCode()).thenReturn(429); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [429]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); + // 401 + when(statusLine.getStatusCode()).thenReturn(401); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an authentication error status code for request [null] status [401]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.UNAUTHORIZED)); + // 300 + when(statusLine.getStatusCode()).thenReturn(300); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Unhandled redirection for request [null] status [300]")); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.MULTIPLE_CHOICES)); + // 402 + when(statusLine.getStatusCode()).thenReturn(402); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + 
containsString("Received an unsuccessful status code for request [null] status [402]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java new file mode 100644 index 0000000000000..56495b053e172 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OpenAiResponseHandlerTests extends ESTestCase { + + public void testCheckForFailureStatusCode() { + var statusLine = mock(StatusLine.class); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var httpRequest = mock(HttpRequestBase.class); + + var httpResult = new HttpResult(httpResponse, new byte[] {}); + + var 
handler = new OpenAiResponseHandler("", result -> null); + + // 200 ok + when(statusLine.getStatusCode()).thenReturn(200); + handler.checkForFailureStatusCode(httpRequest, httpResult); + // 503 + when(statusLine.getStatusCode()).thenReturn(503); + var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a server error status code for request [null] status [503]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 429 + when(statusLine.getStatusCode()).thenReturn(429); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [429]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); + // 401 + when(statusLine.getStatusCode()).thenReturn(401); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an authentication error status code for request [null] status [401]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.UNAUTHORIZED)); + // 300 + when(statusLine.getStatusCode()).thenReturn(300); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Unhandled redirection for request 
[null] status [300]")); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.MULTIPLE_CHOICES)); + // 402 + when(statusLine.getStatusCode()).thenReturn(402); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an unsuccessful status code for request [null] status [402]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java index 06279e9c89da6..738ab3d155bc4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java @@ -21,7 +21,7 @@ public class HuggingFaceElserRequestEntityTests extends ESTestCase { public void testXContent() throws IOException { - var entity = new HuggingFaceElserRequestEntity(List.of("abc")); + var entity = new HuggingFaceInferenceRequestEntity(List.of("abc")); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); entity.toXContent(builder, null); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java index 2a8ce9a46e498..1a5eb7fb8845c 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java @@ -44,10 +44,10 @@ public void testCreateRequest() throws URISyntaxException, IOException { assertThat(inputList, contains("abc")); } - public static HuggingFaceElserRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { + public static HuggingFaceInferenceRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { var account = new HuggingFaceAccount(new URI(url), new SecureString(apiKey.toCharArray())); - var entity = new HuggingFaceElserRequestEntity(List.of(input)); + var entity = new HuggingFaceInferenceRequestEntity(List.of(input)); - return new HuggingFaceElserRequest(account, entity); + return new HuggingFaceInferenceRequest(account, entity); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java new file mode 100644 index 0000000000000..e3f14ad085761 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java @@ -0,0 +1,339 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { + public void testFromResponse_CreatesResultsForASingleItem_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 0.014539449, + -0.015288644 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F))))); + } + + public void testFromResponse_CreatesResultsForASingleItem_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 0.014539449, + -0.015288644 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F))))); + } + + public void testFromResponse_CreatesResultsForMultipleItems_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 0.014539449, + -0.015288644 + ], + [ + 0.0123, + -0.0123 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), 
responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F)), + new TextEmbeddingResults.Embedding(List.of(0.0123F, -0.0123F)) + ) + ) + ); + } + + public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 0.014539449, + -0.015288644 + ], + [ + 0.0123, + -0.0123 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F)), + new TextEmbeddingResults.Embedding(List.of(0.0123F, -0.0123F)) + ) + ) + ); + } + + public void testFromResponse_FailsWhenArrayOfObjects() { + String responseJson = """ + [ + {} + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + } + + public void testFromResponse_FailsWhenEmbeddingsFieldIsNotPresent() { + String responseJson = """ + { + "not_embeddings": [ + [ + 0.014539449, + -0.015288644 + ] + ] + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [embeddings] in Hugging Face embeddings response")); + } + + public void 
testFromResponse_FailsWhenEmbeddingsFieldNotAnArray() { + String responseJson = """ + { + "embeddings": { + "a": [ + 0.014539449, + -0.015288644 + ] + } + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + } + + public void testFromResponse_FailsWhenEmbeddingValueIsAString_ArrayFormat() { + String responseJson = """ + [ + [ + "abc" + ] + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]") + ); + } + + public void testFromResponse_FailsWhenEmbeddingValueIsAString_ObjectFormat() { + String responseJson = """ + { + "embeddings": [ + [ + "abc" + ] + ] + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]") + ); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 1 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new 
TextEmbeddingResults.Embedding(List.of(1.0F))))); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 1 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(1.0F))))); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 40294967295 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(4.0294965E10F))))); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 40294967295 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(4.0294965E10F))))); + } + + public void testFromResponse_FailsWhenEmbeddingValueIsAnObject_ObjectFormat() { + String responseJson = """ + { + "embeddings": [ + [ + {} + ] + ] + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type 
[VALUE_NUMBER] but found [START_OBJECT]") + ); + } + + public void testFromResponse_FailsWithUnknownToken() { + String responseJson = """ + "super" + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to parse object: unexpected token [VALUE_STRING] found")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java new file mode 100644 index 0000000000000..ed381de844731 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.apache.http.HttpResponse; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.nio.charset.StandardCharsets; + +import static org.mockito.Mockito.mock; + +public class HuggingFaceErrorResponseEntityTests extends ESTestCase { + public void testFromResponse() { + String responseJson = """ + { + "error": "A valid user token is required" + } + """; + + HuggingFaceErrorResponseEntity errorMessage = HuggingFaceErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNotNull(errorMessage); + assertEquals("A valid user token is required", errorMessage.getErrorMessage()); + } + + public void testFromResponse_noMessage() { + String responseJson = """ + { + "error": { + "type": "invalid_request_error" + } + } + """; + + HuggingFaceErrorResponseEntity errorMessage = HuggingFaceErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNull(errorMessage); + } + + public void testFromResponse_noError() { + String responseJson = """ + { + "something": { + "not": "relevant" + } + } + """; + + HuggingFaceErrorResponseEntity errorMessage = HuggingFaceErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNull(errorMessage); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index 56d8171640b53..2301be28f62c4 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -317,37 +317,4 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAnObject() { is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]") ); } - - public void testFromResponse_FailsWhenIsMissingFinalClosingBracket() { - String responseJson = """ - { - "object": "list", - "data": [ - { - "object": "embedding", - "index": 0, - "embedding": [ - {} - ] - } - ], - "model": "text-embedding-ada-002-v2", - "usage": { - "prompt_tokens": 8, - "total_tokens": 8 - } - """; - - var thrownException = expectThrows( - ParsingException.class, - () -> OpenAiEmbeddingsResponseEntity.fromResponse( - new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) - ) - ); - - assertThat( - thrownException.getMessage(), - is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]") - ); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java index 4e3465e24c951..4dc6c4190f92c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java @@ -49,7 +49,7 @@ public void testFromResponse_noMessage() { assertNull(errorMessage); } - public void testFromResponse_noErro() { + public void testFromResponse_noError() { String responseJson = """ { "something": { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java index 43928da8ed3b3..0f37ac87fe45a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java @@ -19,7 +19,7 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.services.MapParsingUtils; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.Map; @@ -73,12 +73,10 @@ public record TestServiceSettings(String model) implements ServiceSettings { public static TestServiceSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); - String model = MapParsingUtils.removeAsType(map, "model", String.class); + String model = ServiceUtils.removeAsType(map, "model", String.class); if (model == null) { - validationException.addValidationError( - MapParsingUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS) - ); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS)); } if (validationException.validationErrors().isEmpty() == false) { @@ -121,7 +119,7 @@ public record TestTaskSettings(Integer temperature) implements TaskSettings { private static final String NAME = "test_task_settings"; public static TestTaskSettings fromMap(Map map) { - Integer temperature = MapParsingUtils.removeAsType(map, "temperature", Integer.class); + Integer temperature = ServiceUtils.removeAsType(map, "temperature", Integer.class); return new TestTaskSettings(temperature); } @@ -162,10 +160,10 @@ public record TestSecretSettings(String apiKey) implements 
SecretSettings { public static TestSecretSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); - String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class); + String apiKey = ServiceUtils.removeAsType(map, "api_key", String.class); if (apiKey == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS)); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS)); } if (validationException.validationErrors().isEmpty() == false) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java new file mode 100644 index 0000000000000..fb61a86c7b9c4 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -0,0 +1,143 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class SenderServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testStart_InitializesTheSender() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSenderFactory.class); + when(factory.createSender(anyString())).thenReturn(sender); + + try (var service = new 
TestSenderService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.start(mock(Model.class), listener); + + listener.actionGet(TIMEOUT); + verify(sender, times(1)).start(); + verify(factory, times(1)).createSender(anyString()); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + public void testStart_CallingStartTwiceKeepsSameSenderReference() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSenderFactory.class); + when(factory.createSender(anyString())).thenReturn(sender); + + try (var service = new TestSenderService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.start(mock(Model.class), listener); + listener.actionGet(TIMEOUT); + + service.start(mock(Model.class), listener); + listener.actionGet(TIMEOUT); + + verify(factory, times(1)).createSender(anyString()); + verify(sender, times(2)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + private static final class TestSenderService extends SenderService { + TestSenderService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + protected void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ) { + + } + + @Override + public String name() { + return "test service"; + } + + @Override + public Model parseRequestConfig(String modelId, TaskType taskType, Map config, Set platfromArchitectures) { + return null; + } + + @Override + public Model parsePersistedConfigWithSecrets( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + return null; + } + + @Override + public Model parsePersistedConfig(String modelId, TaskType taskType, Map 
config) { + return null; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java similarity index 88% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 9ff23ea38541d..eb54745806a68 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -14,34 +14,34 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.convertToUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.createUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredSecureString; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static 
org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -public class MapParsingUtilsTests extends ESTestCase { +public class ServiceUtilsTests extends ESTestCase { public void testRemoveAsTypeWithTheCorrectType() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 1.0)); - Integer i = MapParsingUtils.removeAsType(map, "a", Integer.class); + Integer i = ServiceUtils.removeAsType(map, "a", Integer.class); assertEquals(Integer.valueOf(5), i); assertNull(map.get("a")); // field has been removed - String str = MapParsingUtils.removeAsType(map, "b", String.class); + String str = ServiceUtils.removeAsType(map, "b", String.class); assertEquals("a string", str); assertNull(map.get("b")); - Boolean b = MapParsingUtils.removeAsType(map, "c", Boolean.class); + Boolean b = ServiceUtils.removeAsType(map, "c", Boolean.class); assertEquals(Boolean.TRUE, b); assertNull(map.get("c")); - Double d = MapParsingUtils.removeAsType(map, "d", Double.class); + Double d = ServiceUtils.removeAsType(map, "d", Double.class); assertEquals(Double.valueOf(1.0), d); assertNull(map.get("d")); @@ -51,20 +51,20 @@ public void testRemoveAsTypeWithTheCorrectType() { public void testRemoveAsTypeWithInCorrectType() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 5.0, "e", 5)); - var e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "a", String.class)); + var e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "a", String.class)); assertThat( e.getMessage(), containsString("field [a] is not of the expected type. 
The value [5] cannot be converted to a [String]") ); - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "b", Boolean.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "b", Boolean.class)); assertThat( e.getMessage(), containsString("field [b] is not of the expected type. The value [a string] cannot be converted to a [Boolean]") ); assertNull(map.get("b")); - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "c", Integer.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "c", Integer.class)); assertThat( e.getMessage(), containsString("field [c] is not of the expected type. The value [true] cannot be converted to a [Integer]") @@ -72,7 +72,7 @@ public void testRemoveAsTypeWithInCorrectType() { assertNull(map.get("c")); // cannot convert double to integer - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "d", Integer.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "d", Integer.class)); assertThat( e.getMessage(), containsString("field [d] is not of the expected type. The value [5.0] cannot be converted to a [Integer]") @@ -80,7 +80,7 @@ public void testRemoveAsTypeWithInCorrectType() { assertNull(map.get("d")); // cannot convert integer to double - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "e", Double.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "e", Double.class)); assertThat( e.getMessage(), containsString("field [e] is not of the expected type. 
The value [5] cannot be converted to a [Double]") @@ -92,7 +92,7 @@ public void testRemoveAsTypeWithInCorrectType() { public void testRemoveAsTypeMissingReturnsNull() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE)); - assertNull(MapParsingUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); + assertNull(ServiceUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); assertThat(map.entrySet(), hasSize(3)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java new file mode 100644 index 0000000000000..59abda79abad0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class Utils { + public static Model getInvalidModel(String modelId, String serviceName) { + var mockConfigs = mock(ModelConfigurations.class); + when(mockConfigs.getModelId()).thenReturn(modelId); + when(mockConfigs.getService()).thenReturn(serviceName); + + var mockModel = mock(Model.class); + when(mockModel.getConfigurations()).thenReturn(mockConfigs); + + return mockModel; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java new file mode 100644 index 0000000000000..d5eb6e76b622b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; +import static org.hamcrest.CoreMatchers.is; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class HuggingFaceBaseServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() 
throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSenderFactory.class); + when(factory.createSender(anyString())).thenReturn(sender); + + var mockModel = getInvalidModel("model_id", "service_name"); + + try (var service = new TestService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(mockModel, List.of(""), new HashMap<>(), listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") + ); + + verify(factory, times(1)).createSender(anyString()); + verify(sender, times(1)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + private static final class TestService extends HuggingFaceBaseService { + + TestService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + public String name() { + return "test"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + + @Override + protected HuggingFaceModel createModel( + String modelId, + TaskType taskType, + Map serviceSettings, + Map secretSettings, + String failureMessage + ) { + return null; + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java new file mode 100644 index 0000000000000..001d869f67a5c --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceServiceSettingsTests extends AbstractWireSerializingTestCase { + + public static HuggingFaceServiceSettings createRandom() { + return new HuggingFaceServiceSettings(randomAlphaOfLength(15)); + } + + public void testFromMap() { + var url = "https://www.abc.com"; + var serviceSettings = HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))); + + assertThat(serviceSettings, is(new HuggingFaceServiceSettings(url))); + } + + public void testFromMap_MissingUrl_ThrowsError() { + var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceServiceSettings.fromMap(new HashMap<>())); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] does not contain the required setting [%s];", + HuggingFaceServiceSettings.URL + ) + ) + ); + } + + public void testFromMap_EmptyUrl_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, ""))) + ); + + 
assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", + HuggingFaceServiceSettings.URL + ) + ) + ); + } + + public void testFromMap_InvalidUrl_ThrowsError() { + var url = "https://www.abc^.com"; + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))) + ); + + assertThat( + thrownException.getMessage(), + is( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", + url, + HuggingFaceServiceSettings.URL + ) + ) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return HuggingFaceServiceSettings::new; + } + + @Override + protected HuggingFaceServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected HuggingFaceServiceSettings mutateInstance(HuggingFaceServiceSettings instance) throws IOException { + return createRandom(); + } + + public static Map getServiceSettingsMap(String url) { + var map = new HashMap(); + + map.put(HuggingFaceServiceSettings.URL, url); + + return map; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java new file mode 100644 index 0000000000000..fa31d026b16f5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -0,0 +1,589 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.http.HttpHeaders; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModelTests; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModelTests; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static 
org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettingsTests.getServiceSettingsMap; +import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; + +public class HuggingFaceServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testParseRequestConfig_CreatesAnEmbeddingsModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var model = service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")), + Set.of() + ); + + assertThat(model, 
instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParseRequestConfig_CreatesAnElserModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var model = service.parseRequestConfig( + "id", + TaskType.SPARSE_EMBEDDING, + getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")), + Set.of() + ); + + assertThat(model, instanceOf(HuggingFaceElserModel.class)); + + var embeddingsModel = (HuggingFaceElserModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var config = getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + config.put("extra_key", "value"); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + 
) + ) { + var serviceSettings = getServiceSettingsMap("url"); + serviceSettings.put("extra_key", "value"); + + var config = getRequestConfigMap(serviceSettings, getSecretSettingsMap("secret")); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var secretSettingsMap = getSecretSettingsMap("secret"); + secretSettingsMap.put("extra_key", "value"); + + var config = getRequestConfigMap(getServiceSettingsMap("url"), secretSettingsMap); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + 
assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnElserModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.SPARSE_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceElserModel.class)); + + var embeddingsModel = (HuggingFaceElserModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() 
throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var secretSettingsMap = getSecretSettingsMap("secret"); + secretSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecrets() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + persistedConfig.secrets.put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new 
SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = getServiceSettingsMap("url"); + serviceSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = new HashMap(); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), taskSettingsMap, getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfig_CreatesAnEmbeddingsModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = 
getPersistedConfigMap(getServiceSettingsMap("url")); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesAnElserModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url")); + + var model = service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceElserModel.class)); + + var embeddingsModel = (HuggingFaceElserModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url")); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInServiceSettings() throws 
IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = getServiceSettingsMap("url"); + serviceSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(serviceSettingsMap); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = new HashMap(); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), taskSettingsMap, null); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testInfer_SendsEmbeddingsRequest() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new HuggingFaceService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123, + 0.0123 + ] + ] + { + """; + 
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), Matchers.is(buildExpectation(List.of(List.of(-0.0123F, 0.0123F))))); + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), Matchers.is(1)); + assertThat(requestMap.get("inputs"), Matchers.is(List.of("abc"))); + } + } + + public void testInfer_SendsElserRequest() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new HuggingFaceService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + [ + { + ".": 0.133155956864357 + } + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat( + result.asMap(), + Matchers.is( + SparseEmbeddingResultsTests.buildExpectation( + List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)) + ) + ) + ); + assertThat(webServer.requests(), hasSize(1)); + 
assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), Matchers.is(1)); + assertThat(requestMap.get("inputs"), Matchers.is(List.of("abc"))); + } + } + + private Map getRequestConfigMap(Map serviceSettings, Map secretSettings) { + var builtServiceSettings = new HashMap<>(); + builtServiceSettings.putAll(serviceSettings); + builtServiceSettings.putAll(secretSettings); + + return new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings)); + } + + private HuggingFaceServiceTests.PeristedConfig getPersistedConfigMap(Map serviceSettings) { + return getPersistedConfigMap(serviceSettings, Map.of(), null); + } + + private HuggingFaceServiceTests.PeristedConfig getPersistedConfigMap( + Map serviceSettings, + @Nullable Map secretSettings + ) { + return getPersistedConfigMap(serviceSettings, Map.of(), secretSettings); + } + + private HuggingFaceServiceTests.PeristedConfig getPersistedConfigMap( + Map serviceSettings, + Map taskSettings, + Map secretSettings + ) { + + var secrets = secretSettings == null ? 
null : new HashMap(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)); + + return new HuggingFaceServiceTests.PeristedConfig( + new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), + secrets + ); + } + + private record PeristedConfig(Map config, Map secrets) {} +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java new file mode 100644 index 0000000000000..89ad9fd5543df --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserModelTests extends ESTestCase { + + public void testThrowsURISyntaxException_ForInvalidUrl() { + var thrownException = expectThrows(IllegalArgumentException.class, () -> createModel("^^", "secret")); + assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + + public static HuggingFaceElserModel createModel(String url, String apiKey) { + return new HuggingFaceElserModel( + "id", + TaskType.SPARSE_EMBEDDING, + "service", + new HuggingFaceElserServiceSettings(url), + new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java index c3aa628705195..2b8281da8db13 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java @@ -33,6 +33,10 @@ public void testFromMap() { assertThat(new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())), is(serviceSettings)); } + public void testFromMap_ReturnsNull_WhenMapIsNull() { + assertNull(HuggingFaceElserSecretSettings.fromMap(null)); + } + public void testFromMap_MissingApiKey_ThrowsError() { var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceElserSecretSettings.fromMap(new HashMap<>())); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java new file mode 100644 index 0000000000000..6cf70189cea74 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface.embeddings; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import static org.hamcrest.Matchers.is; + +public class HuggingFaceEmbeddingsModelTests extends ESTestCase { + + public void testThrowsURISyntaxException_ForInvalidUrl() { + var thrownException = expectThrows(IllegalArgumentException.class, () -> createModel("^^", "secret")); + assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + + public static HuggingFaceEmbeddingsModel createModel(String url, String apiKey) { + return new HuggingFaceEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new HuggingFaceServiceSettings(url), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java deleted file mode 100644 index cdef3914ec7c7..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.services.openai; - -import org.apache.http.HttpResponse; -import org.apache.http.StatusLine; -import org.apache.http.client.methods.HttpRequestBase; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.external.http.retry.RetryException; - -import static org.hamcrest.Matchers.containsString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class OpenAiResponseHandlerTests extends ESTestCase { - - public void testCheckForFailureStatusCode() { - var statusLine = mock(StatusLine.class); - when(statusLine.getStatusCode()).thenReturn(200).thenReturn(503).thenReturn(429).thenReturn(401).thenReturn(300).thenReturn(402); - - var httpResponse = mock(HttpResponse.class); - when(httpResponse.getStatusLine()).thenReturn(statusLine); - - var httpRequest = mock(HttpRequestBase.class); - - var httpResult = new HttpResult(httpResponse, new byte[] {}); - - // 200 ok - OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult); - // 503 - var retryException = expectThrows( - RetryException.class, - () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult) - ); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Received a 
server error status code for request [null] status [503]")); - // 429 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Received a rate limit status code for request [null] status [429]")); - // 401 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat( - retryException.getMessage(), - containsString("Received a authentication error status code for request [null] status [401]") - ); - // 300 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Unhandled redirection for request [null] status [300]")); - // 402 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Received an unsuccessful status code for request [null] status [402]")); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java index 9fbcc3bec7a60..9e20286c1d0ff 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java @@ -126,7 +126,6 @@ protected OpenAiServiceSettings mutateInstance(OpenAiServiceSettings instance) t } 
public static Map getServiceSettingsMap(@Nullable String url, @Nullable String org) { - var map = new HashMap(); if (url != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 0d57e90dcd31b..a82600c537663 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -48,6 +47,7 @@ import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettingsTests.getServiceSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettingsTests.getTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; @@ -260,7 +260,7 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlO } } - public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOException { + public void 
testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModel() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -291,7 +291,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOE } } - public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { + public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -321,7 +321,7 @@ public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() thro } } - public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlOrganization() throws IOException { + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlOrganization() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -352,7 +352,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUr } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException { + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -366,24 +366,25 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws ); persistedConfig.config().put("extra_key", "value"); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - 
thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecretsSettings() throws IOException { + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -399,24 +400,25 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecretsSettings secretSettingsMap ); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + 
assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecrets() throws IOException { + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSecrets() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -430,24 +432,25 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecrets() throw ); persistedConfig.secrets.put("extra_key", "value"); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInServiceSettings() throws IOException { + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -463,24 +466,25 @@ public void 
testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInServiceSettings getSecretSettingsMap("secret") ); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInTaskSettings() throws IOException { + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -496,64 +500,160 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInTaskSettings() getSecretSettingsMap("secret") ); + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), 
is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), getTaskSettingsMap("model", "user")); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), getTaskSettingsMap("model", "user")); + var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + () -> service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config()) ); assertThat( thrownException.getMessage(), - is("Model configuration contains settings 
[{extra_key=value}] unknown to the [openai] service") + is("Failed to parse stored model [id] for [openai] service, please delete and add the service again") ); } } - public void testStart_InitializesTheSender() throws IOException { - var sender = mock(Sender.class); + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlOrganization() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap(null, null), getTaskSettingsMap("model", null)); - var factory = mock(HttpRequestSenderFactory.class); - when(factory.createSender(anyString())).thenReturn(sender); + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - try (var service = new OpenAiService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { - PlainActionFuture listener = new PlainActionFuture<>(); - service.start(mock(Model.class), listener); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); - listener.actionGet(TIMEOUT); - verify(sender, times(1)).start(); - verify(factory, times(1)).createSender(anyString()); + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertNull(embeddingsModel.getServiceSettings().uri()); + assertNull(embeddingsModel.getServiceSettings().organizationId()); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertNull(embeddingsModel.getTaskSettings().user()); + assertNull(embeddingsModel.getSecretSettings()); } + } - verify(sender, times(1)).close(); - verifyNoMoreInteractions(factory); - verifyNoMoreInteractions(sender); + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new 
SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), getTaskSettingsMap("model", "user")); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); + } } - public void testStart_CallingStartTwiceKeepsSameSenderReference() throws IOException { - var sender = mock(Sender.class); + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = getServiceSettingsMap("url", "org"); + serviceSettingsMap.put("extra_key", "value"); - var factory = mock(HttpRequestSenderFactory.class); - when(factory.createSender(anyString())).thenReturn(sender); + var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getTaskSettingsMap("model", "user")); - try (var service = new OpenAiService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { - PlainActionFuture listener = new PlainActionFuture<>(); - service.start(mock(Model.class), listener); - listener.actionGet(TIMEOUT); + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - service.start(mock(Model.class), listener); - listener.actionGet(TIMEOUT); + assertThat(model, 
instanceOf(OpenAiEmbeddingsModel.class)); - verify(factory, times(1)).createSender(anyString()); - verify(sender, times(2)).start(); + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); } + } - verify(sender, times(1)).close(); - verifyNoMoreInteractions(factory); - verifyNoMoreInteractions(sender); + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = getTaskSettingsMap("model", "user"); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), taskSettingsMap); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); + } } public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException { @@ -653,23 +753,12 @@ public void testInfer_UnauthorisedResponse() throws IOException { service.infer(model, List.of("abc"), new HashMap<>(), listener); var error = expectThrows(ElasticsearchException.class, () 
-> listener.actionGet(TIMEOUT)); - assertThat(error.getMessage(), containsString("Received a authentication error status code for request")); + assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); assertThat(error.getMessage(), containsString("Error message: [Incorrect API key provided:]")); assertThat(webServer.requests(), hasSize(1)); } } - private static Model getInvalidModel(String modelId, String serviceName) { - var mockConfigs = mock(ModelConfigurations.class); - when(mockConfigs.getModelId()).thenReturn(modelId); - when(mockConfigs.getService()).thenReturn(serviceName); - - var mockModel = mock(Model.class); - when(mockModel.getConfigurations()).thenReturn(mockConfigs); - - return mockModel; - } - private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, @@ -696,5 +785,13 @@ private PeristedConfig getPersistedConfigMap( ); } + private PeristedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { + + return new PeristedConfig( + new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), + null + ); + } + private record PeristedConfig(Map config, Map secrets) {} } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java index 2fd952fbbdda4..bd7a3ef4dcf03 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java @@ -33,6 +33,10 @@ public void testFromMap() { assertThat(new DefaultSecretSettings(new SecureString(apiKey.toCharArray())), is(serviceSettings)); } + public void testFromMap_ReturnsNull_WhenMapIsNull() 
{ + assertNull(DefaultSecretSettings.fromMap(null)); + } + public void testFromMap_MissingApiKey_ThrowsError() { var thrownException = expectThrows(ValidationException.class, () -> DefaultSecretSettings.fromMap(new HashMap<>())); From fcd923902d6395455e179b7df5a4e705240cf2a8 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Fri, 1 Dec 2023 12:32:44 -0800 Subject: [PATCH 104/181] Enable debug logging on the test (#102793) On a successful executions test expects to get exception `expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet());` which is thrown here, line L401 https://github.com/elastic/elasticsearch/blob/b18b5cba60540b6335ba9c69b1ec96b50f2a082a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java#L395-L401 Apparently the condition `if (shardSearchFailures.length == getNumShards())` does not hold sometimes and execution thread passes through to the next search phase. This PR enables debug logging to get more insights on possible root cause. Also, it fixes test code, there were missing assignments of `searchShardsResponse` sothat entire `if` block was never assessed. 
Related to #97878 --- ...pshotsCanMatchOnCoordinatorIntegTests.java | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index 32c031f80177d..844e6099460b2 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotAction; @@ -559,6 +560,10 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() * Can match against searchable snapshots is tested via both the Search API and the SearchShards (transport-only) API. * The latter is a way to do only a can-match rather than all search phases. 
*/ + @TestIssueLogging( + issueUrl = "https://github.com/elastic/elasticsearch/issues/97878", + value = "org.elasticsearch.snapshots:DEBUG,org.elasticsearch.indices.recovery:DEBUG,org.elasticsearch.action.search:DEBUG" + ) public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCoordinatingNode() throws Exception { internalCluster().startMasterOnlyNode(); internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); @@ -622,7 +627,18 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo // All shards failed, since all shards are unassigned and the IndexMetadata min/max timestamp // is not available yet - expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet()); + expectThrows(SearchPhaseExecutionException.class, () -> { + SearchResponse response = client().search(request).actionGet(); + logger.info( + "[TEST DEBUG INFO] Search hits: {} Successful shards: {}, failed shards: {}, skipped shards: {}, total shards: {}", + response.getHits().getTotalHits().value, + response.getSuccessfulShards(), + response.getFailedShards(), + response.getSkippedShards(), + response.getTotalShards() + ); + fail("This search call is expected to throw an exception but it did not"); + }); // test with SearchShards API boolean allowPartialSearchResults = false; @@ -639,15 +655,13 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo { SearchShardsResponse searchShardsResponse = null; try { - client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); + searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); } catch (SearchPhaseExecutionException e) { // ignore as this is expected to happen } if (searchShardsResponse != null) { - if (searchShardsResponse != null) { - for (SearchShardsGroup group : searchShardsResponse.getGroups()) { - assertFalse("no shard should be marked as skipped", 
group.skipped()); - } + for (SearchShardsGroup group : searchShardsResponse.getGroups()) { + assertFalse("no shard should be marked as skipped", group.skipped()); } } } @@ -680,7 +694,7 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo { SearchShardsResponse searchShardsResponse = null; try { - client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); + searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); } catch (SearchPhaseExecutionException e) { // ignore as this is expected to happen } From 76a6dd618b180f3873130791307b47d3faef8fe6 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 1 Dec 2023 14:49:28 -0800 Subject: [PATCH 105/181] New QA module for heap attack tests (#102833) The heap attack test suite is very special. It deliberately tries to take down Elasticsearch testing instances. When one of Elasticsearch testing instance is terminated, other tests will fail. This PR avoids such noise by adding a new QA module for only heap attack tests. 
--- .../internal/RestrictedBuildApiService.java | 1 + .../esql/qa/server/heap-attack/build.gradle | 19 ++++++++++++++ .../esql/qa/heap_attack}/HeapAttackIT.java | 25 +++++++++---------- .../esql/qa/server/multi-node/build.gradle | 2 +- 4 files changed, 33 insertions(+), 14 deletions(-) create mode 100644 x-pack/plugin/esql/qa/server/heap-attack/build.gradle rename x-pack/plugin/esql/qa/server/{single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node => heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack}/HeapAttackIT.java (97%) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index cafa02941d77c..2d5dc65a43fae 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -124,6 +124,7 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:eql:qa:correctness"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:eql:qa:mixed-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:security"); + map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:heap-attack"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:multi-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:single-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:fleet:qa:rest"); diff --git a/x-pack/plugin/esql/qa/server/heap-attack/build.gradle b/x-pack/plugin/esql/qa/server/heap-attack/build.gradle new file mode 100644 index 0000000000000..de88fdecf2b14 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/heap-attack/build.gradle @@ -0,0 +1,19 @@ +apply plugin: 
'elasticsearch.legacy-yaml-rest-test' + +dependencies { + javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) +} + +restResources { + restApi { + include '_common', 'bulk', 'indices', 'esql', 'xpack', 'enrich' + } +} + +testClusters.configureEach { + numberOfNodes = 1 + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.monitoring.collection.enabled', 'true' + setting 'xpack.security.enabled', 'false' +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java similarity index 97% rename from x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java rename to x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java index 31d0a7646e1b7..2cc13117a299f 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.qa.single_node; +package org.elasticsearch.xpack.esql.qa.heap_attack; import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; @@ -40,6 +40,7 @@ import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.any; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; /** @@ -472,25 +473,23 @@ private void initIndex(String name, String bulk) throws IOException { Request request = new Request("POST", "/" + name + "/_refresh"); Response response = client().performRequest(request); - assertThat( - EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), - equalTo("{\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0}}") - ); + assertWriteResponse(response); request = new Request("POST", "/" + name + "/_forcemerge"); request.addParameter("max_num_segments", "1"); response = client().performRequest(request); - assertThat( - EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), - equalTo("{\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0}}") - ); + assertWriteResponse(response); request = new Request("POST", "/" + name + "/_refresh"); response = client().performRequest(request); - assertThat( - EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), - equalTo("{\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0}}") - ); + assertWriteResponse(response); + } + + @SuppressWarnings("unchecked") + private static void assertWriteResponse(Response response) throws IOException { + Map shards = (Map) entityAsMap(response).get("_shards"); + assertThat((int) shards.get("successful"), greaterThanOrEqualTo(1)); + assertThat(shards.get("failed"), equalTo(0)); } @Before diff --git a/x-pack/plugin/esql/qa/server/multi-node/build.gradle b/x-pack/plugin/esql/qa/server/multi-node/build.gradle index 1b62fdea2671c..300ed4df92bc2 100644 --- 
a/x-pack/plugin/esql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-node/build.gradle @@ -6,7 +6,7 @@ dependencies { restResources { restApi { - include '_common', 'bulk', 'indices', 'esql', 'xpack', 'enrich' + include '_common', 'bulk', 'indices', 'esql', 'xpack' } } From 624909b2840b76a310c00f4b70644a953d63ab1b Mon Sep 17 00:00:00 2001 From: David Turner Date: Sun, 3 Dec 2023 01:15:49 +0100 Subject: [PATCH 106/181] AwaitsFix for #102899 --- .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 0b3b4fae82324..2b686ef1cb61d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -768,6 +768,7 @@ public void testMaxDimsByteVector() throws IOException { assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102899") public void testVectorSimilarity() { assertEquals( VectorSimilarityFunction.COSINE, From 7e24080fb26a88d7b1a0b897ef425317251747d5 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Sun, 3 Dec 2023 02:08:12 +0100 Subject: [PATCH 107/181] Suppress gradle welcome messages (#102898) There are not usefuil for us --- gradle.properties | 1 + 1 file changed, 1 insertion(+) diff --git a/gradle.properties b/gradle.properties index 64cb394206e66..745fb4f9e51ae 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,3 +1,4 @@ +org.gradle.welcome=never org.gradle.warning.mode=none org.gradle.parallel=true # We need to declare --add-exports to make spotless working seamlessly with jdk16 From 
9018f58954231358881bd03b36e3d88f92a35db8 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Sun, 3 Dec 2023 19:25:03 +0100 Subject: [PATCH 108/181] Fix failing to release ref-count after resolving ActionListener (#102900) Fixing a couple of spots that I found by making `SearchResponse` actually ref-counted, where we missed decrementing a ref-count after passing a just constructed object to a listener. Added short-cut utility for this to `ActionListener` because this pattern is already all over the place and will become even more common shortly as the search response ref-counting work is progressing. --- .../elasticsearch/action/ActionListener.java | 13 +++++++++ .../elasticsearch/action/ActionRunnable.java | 7 +---- .../search/AbstractSearchAsyncAction.java | 2 +- .../search/SearchScrollAsyncAction.java | 3 +- .../search/TransportMultiSearchAction.java | 11 ++----- .../action/search/TransportSearchAction.java | 6 ++-- .../action/search/ExpandSearchPhaseTests.java | 29 +++++++------------ .../search/FetchLookupFieldsPhaseTests.java | 7 +---- .../search/MultiSearchActionTookTests.java | 26 ++++++++--------- .../SearchQueryThenFetchAsyncActionTests.java | 8 +---- .../xpack/search/AsyncSearchTask.java | 6 ++-- .../GetCcrRestoreFileChunkAction.java | 7 +---- .../compute/OwningChannelActionListener.java | 6 +--- .../persistence/JobResultsProviderTests.java | 14 ++++----- .../action/TransportRollupSearchAction.java | 2 +- 15 files changed, 60 insertions(+), 87 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index b0e18d5ef9b55..5017f0af0007c 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.CheckedRunnable; +import 
org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import java.util.ArrayList; @@ -309,6 +310,18 @@ static void completeWith(ActionListener listener, CheckedSu } } + /** + * Shorthand for resolving given {@code listener} with given {@code response} and decrementing the response's ref count by one + * afterwards. + */ + static void respondAndRelease(ActionListener listener, R response) { + try { + listener.onResponse(response); + } finally { + response.decRef(); + } + } + /** * @return A listener which (if assertions are enabled) wraps around the given delegate and asserts that it is only called once. */ diff --git a/server/src/main/java/org/elasticsearch/action/ActionRunnable.java b/server/src/main/java/org/elasticsearch/action/ActionRunnable.java index 7feabf7e0241f..7c0879941af89 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionRunnable.java +++ b/server/src/main/java/org/elasticsearch/action/ActionRunnable.java @@ -70,12 +70,7 @@ public static ActionRunnable supplyAndDecRef( return wrap(listener, new CheckedConsumer<>() { @Override public void accept(ActionListener l) throws Exception { - var res = supplier.get(); - try { - l.onResponse(res); - } finally { - res.decRef(); - } + ActionListener.respondAndRelease(l, supplier.get()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 82c2f020a0962..d821764e788b7 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -700,7 +700,7 @@ public void sendSearchResponse(InternalSearchResponse internalSearchResponse, At searchContextId = null; } } - listener.onResponse(buildSearchResponse(internalSearchResponse, failures, scrollId, searchContextId)); + ActionListener.respondAndRelease(listener, 
buildSearchResponse(internalSearchResponse, failures, scrollId, searchContextId)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index fc1ccfb00d6ce..5681bda8b2741 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -247,7 +247,8 @@ protected final void sendResponse( if (request.scroll() != null) { scrollId = request.scrollId(); } - listener.onResponse( + ActionListener.respondAndRelease( + listener, new SearchResponse( internalResponse, scrollId, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index a7d971069f96d..1fc9bca607285 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -186,15 +186,10 @@ private void handleResponse(final int responseSlot, final MultiSearchResponse.It } private void finish() { - final var response = new MultiSearchResponse( - responses.toArray(new MultiSearchResponse.Item[responses.length()]), - buildTookInMillis() + ActionListener.respondAndRelease( + listener, + new MultiSearchResponse(responses.toArray(new MultiSearchResponse.Item[responses.length()]), buildTookInMillis()) ); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 38d448a8a9372..9010fa1ea0e75 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -549,8 +549,8 @@ public void onResponse(SearchResponse searchResponse) { searchResponse.isTerminatedEarly(), searchResponse.getNumReducePhases() ); - - listener.onResponse( + ActionListener.respondAndRelease( + listener, new SearchResponse( internalSearchResponse, searchResponse.getScrollId(), @@ -571,7 +571,7 @@ public void onFailure(Exception e) { logCCSError(failure, clusterAlias, skipUnavailable); ccsClusterInfoUpdate(failure, clusters, clusterAlias, skipUnavailable); if (skipUnavailable) { - listener.onResponse(SearchResponse.empty(timeProvider::buildTookInMillis, clusters)); + ActionListener.respondAndRelease(listener, SearchResponse.empty(timeProvider::buildTookInMillis, clusters)); } else { listener.onFailure(wrapRemoteClusterFailure(clusterAlias, e)); } diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 126d09663a169..f8a22ec04fb15 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -102,15 +102,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null)); } - var response = new MultiSearchResponse( - mSearchResponses.toArray(new MultiSearchResponse.Item[0]), - randomIntBetween(1, 10000) + ActionListener.respondAndRelease( + listener, + new MultiSearchResponse(mSearchResponses.toArray(new MultiSearchResponse.Item[0]), randomIntBetween(1, 10000)) ); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } } }; @@ -170,17 +165,15 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL ShardSearchFailure.EMPTY_ARRAY, 
SearchResponse.Clusters.EMPTY ); - var response = new MultiSearchResponse( - new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(null, new RuntimeException("boom")), - new MultiSearchResponse.Item(searchResponse, null) }, - randomIntBetween(1, 10000) + ActionListener.respondAndRelease( + listener, + new MultiSearchResponse( + new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new RuntimeException("boom")), + new MultiSearchResponse.Item(searchResponse, null) }, + randomIntBetween(1, 10000) + ) ); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } } }; diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java index 215293517a467..38409752c7e7d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java @@ -119,12 +119,7 @@ void sendExecuteMultiSearch( null ); } - var response = new MultiSearchResponse(responses, randomNonNegativeLong()); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } + ActionListener.respondAndRelease(listener, new MultiSearchResponse(responses, randomNonNegativeLong())); } }; diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index f1867b223760d..9b1ed6eee1028 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -147,21 +147,19 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - var resp = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, 
- ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY + ActionListener.respondAndRelease( + listener, + new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 0, + 0, + 0, + 0L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ) ); - try { - listener.onResponse(resp); - } finally { - resp.decRef(); - } }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 3097376de7a41..a973fa20851db 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -157,13 +157,7 @@ public void sendExecuteQuery( queryResult.size(1); successfulOps.incrementAndGet(); queryResult.incRef(); - new Thread(() -> { - try { - listener.onResponse(queryResult); - } finally { - queryResult.decRef(); - } - }).start(); + new Thread(() -> ActionListener.respondAndRelease(listener, queryResult)).start(); } finally { queryResult.decRef(); } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java index d445a012ecee9..8851d27fb087d 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java @@ -201,7 +201,7 @@ public void addCompletionListener(ActionListener listener, } } if (executeImmediately) { - listener.onResponse(getResponseWithHeaders()); + ActionListener.respondAndRelease(listener, getResponseWithHeaders()); } } @@ -238,7 +238,7 @@ private void internalAddCompletionListener(ActionListener l if (hasRun.compareAndSet(false, true)) { // timeout occurred 
before completion removeCompletionListener(id); - listener.onResponse(getResponseWithHeaders()); + ActionListener.respondAndRelease(listener, getResponseWithHeaders()); } }, waitForCompletion, threadPool.generic()); } catch (Exception exc) { @@ -255,7 +255,7 @@ private void internalAddCompletionListener(ActionListener l } } if (executeImmediately) { - listener.onResponse(getResponseWithHeaders()); + ActionListener.respondAndRelease(listener, getResponseWithHeaders()); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java index 7aab281f4f7ed..53751343f0783 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java @@ -85,12 +85,7 @@ protected void doExecute( try (CcrRestoreSourceService.SessionReader sessionReader = restoreSourceService.getSessionReader(sessionUUID)) { long offsetAfterRead = sessionReader.readFileBytes(fileName, reference); long offsetBeforeRead = offsetAfterRead - reference.length(); - var chunk = new GetCcrRestoreFileChunkResponse(offsetBeforeRead, reference); - try { - listener.onResponse(chunk); - } finally { - chunk.decRef(); - } + ActionListener.respondAndRelease(listener, new GetCcrRestoreFileChunkResponse(offsetBeforeRead, reference)); } } catch (IOException e) { listener.onFailure(e); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java index 6512d80859163..50a20ee6ee73d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java @@ -28,11 +28,7 @@ public OwningChannelActionListener(TransportChannel channel) { @Override public void onResponse(Response response) { - try { - listener.onResponse(response); - } finally { - response.decRef(); - } + ActionListener.respondAndRelease(listener, response); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index 0fe693490d466..39f02f71642ed 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -948,15 +948,13 @@ private Client getMockedClient(Consumer queryBuilderConsumer, Sear queryBuilderConsumer.accept(multiSearchRequest.requests().get(0).source().query()); @SuppressWarnings("unchecked") ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[1]; - MultiSearchResponse mresponse = new MultiSearchResponse( - new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(response, null) }, - randomNonNegativeLong() + ActionListener.respondAndRelease( + actionListener, + new MultiSearchResponse( + new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(response, null) }, + randomNonNegativeLong() + ) ); - try { - actionListener.onResponse(mresponse); - } finally { - mresponse.decRef(); - } return null; }).when(client).multiSearch(any(), any()); doAnswer(invocationOnMock -> { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 9fe634a178179..ff167c5586dce 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -143,7 +143,7 @@ public AggregationReduceContext forFinalReduction() { ); } }; - listener.onResponse(processResponses(rollupSearchContext, msearchResponse, reduceContextBuilder)); + ActionListener.respondAndRelease(listener, processResponses(rollupSearchContext, msearchResponse, reduceContextBuilder)); }, listener::onFailure)); } From 759280c75856de312ec7b2a99c219cec4cc61b1b Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 4 Dec 2023 08:07:45 +0100 Subject: [PATCH 109/181] [Connector API] Implement update last_sync endpoint (#102858) --- .../api/connector.last_sync.json | 39 +++ .../334_connector_update_last_sync_stats.yml | 62 +++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 32 ++- .../connector/ConnectorIndexService.java | 51 +++- .../connector/ConnectorSyncInfo.java | 54 ++-- ...estUpdateConnectorLastSyncStatsAction.java | 45 ++++ ...ortUpdateConnectorLastSyncStatsAction.java | 55 ++++ .../UpdateConnectorLastSyncStatsAction.java | 240 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 48 ++++ .../connector/ConnectorTestUtils.java | 4 +- ...StatsActionRequestBWCSerializingTests.java | 52 ++++ ...tatsActionResponseBWCSerializingTests.java | 43 ++++ .../xpack/security/operator/Constants.java | 1 + 14 files changed, 669 insertions(+), 62 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json new file mode 100644 index 0000000000000..43b7b078eef58 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json @@ -0,0 +1,39 @@ +{ + "connector.last_sync": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the stats of last sync in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_last_sync", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "Object with stats related to the last connector sync run.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml new file mode 100644 index 0000000000000..f9989b615bef6 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml @@ -0,0 +1,62 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector +--- +"Update Connector Last Sync Stats": + - do: + connector.last_sync: + connector_id: test-connector + body: + last_sync_error: "oh no error" + last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { last_sync_error: "oh no error" } + - match: { last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" } + +--- +"Update Connector Last Sync Stats - Connector doesn't exist": + - do: + catch: "missing" + connector.last_sync: + connector_id: test-non-existent-connector + body: + last_sync_error: "oh no error" + last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" + +--- +"Update Connector Filtering - Wrong datetime expression": + - do: + catch: "bad_request" + connector.last_sync: + connector_id: test-connector + body: + last_access_control_sync_scheduled_at: "this is not a timestamp" + + +--- +"Update Connector Filtering - Wrong status": + - do: + catch: "bad_request" + connector.last_sync: + connector_id: test-connector + body: + last_sync_status: "this 
is not a valid status" + + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 29758c3c334cc..2a53a46760868 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; @@ -60,10 +61,12 @@ import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; +import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; @@ -200,6 +203,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), + new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), @@ -265,6 +269,7 @@ public List getRestHandlers( new RestPutConnectorAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorLastSeenAction(), + new RestUpdateConnectorLastSyncStatsAction(), new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index f824009196648..45b906d815aee 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -287,45 +287,45 @@ public Connector(StreamInput in) throws IOException { ObjectParser.ValueType.STRING_OR_NULL ); - 
PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> ConnectorSyncStatus.connectorSyncStatus(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); - PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> ConnectorSyncStatus.connectorSyncStatus(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), ConnectorSyncInfo.LAST_SYNC_STATUS_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_SYNCED_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareString(optionalConstructorArg(), NAME_FIELD); @@ -485,6 +485,10 @@ public Map getConfiguration() { return configuration; } + public ConnectorSyncInfo getSyncInfo() { + return syncInfo; + } + public Instant getLastSeen() { return lastSeen; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 9730a0217b942..d99ad28dc3970 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -33,6 +33,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; @@ -203,12 +204,42 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ } /** - * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. + * Updates the lastSeen property of a {@link Connector}. * - * @param request Request for updating connector ingest pipeline property. + * @param request The request for updating the connector's lastSeen status. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Updates the {@link ConnectorSyncInfo} properties in a {@link Connector}. + * + * @param request Request for updating connector last sync stats properties. * @param listener Listener to respond to a successful response or an error. 
*/ - public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { + public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -233,12 +264,12 @@ public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request reques } /** - * Updates the {@link ConnectorScheduling} property of a {@link Connector}. + * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. * - * @param request The request for updating the connector's scheduling. - * @param listener The listener for handling responses, including successful updates or errors. + * @param request Request for updating connector ingest pipeline property. + * @param listener Listener to respond to a successful response or an error. */ - public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { + public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -263,12 +294,12 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } /** - * Updates the lastSeen property of a {@link Connector}. + * Updates the {@link ConnectorScheduling} property of a {@link Connector}. * - * @param request The request for updating the connector's lastSeen status. + * @param request The request for updating the connector's scheduling. * @param listener The listener for handling responses, including successful updates or errors. 
*/ - public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java index 8f2002efff5b6..7daae030155b7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java @@ -90,51 +90,33 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { this.lastSynced = in.readOptionalInstant(); } - static final ParseField LAST_ACCESS_CONTROL_SYNC_ERROR = new ParseField("last_access_control_sync_error"); - static final ParseField LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD = new ParseField("last_access_control_sync_status"); - static final ParseField LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_access_control_sync_scheduled_at"); - static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new ParseField("last_deleted_document_count"); - static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); - static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); - static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); - static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); - static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); - static final ParseField 
LAST_SYNCED_FIELD = new ParseField("last_synced"); + public static final ParseField LAST_ACCESS_CONTROL_SYNC_ERROR = new ParseField("last_access_control_sync_error"); + public static final ParseField LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD = new ParseField("last_access_control_sync_status"); + public static final ParseField LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_access_control_sync_scheduled_at"); + public static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new ParseField("last_deleted_document_count"); + public static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); + public static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); + public static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); + public static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); + public static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); + public static final ParseField LAST_SYNCED_FIELD = new ParseField("last_synced"); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - - if (lastAccessControlSyncError != null) { - builder.field(LAST_ACCESS_CONTROL_SYNC_ERROR.getPreferredName(), lastAccessControlSyncError); - } - if (lastAccessControlSyncStatus != null) { - builder.field(LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD.getPreferredName(), lastAccessControlSyncStatus); - } - if (lastAccessControlSyncScheduledAt != null) { - builder.field(LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastAccessControlSyncScheduledAt); - } + builder.field(LAST_ACCESS_CONTROL_SYNC_ERROR.getPreferredName(), lastAccessControlSyncError); + builder.field(LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD.getPreferredName(), lastAccessControlSyncStatus); + 
builder.field(LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastAccessControlSyncScheduledAt); if (lastDeletedDocumentCount != null) { builder.field(LAST_DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastDeletedDocumentCount); } - if (lastIncrementalSyncScheduledAt != null) { - builder.field(LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastIncrementalSyncScheduledAt); - } + builder.field(LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastIncrementalSyncScheduledAt); if (lastIndexedDocumentCount != null) { builder.field(LAST_INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastIndexedDocumentCount); } - if (lastSyncError != null) { - builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); - } - if (lastSyncScheduledAt != null) { - builder.field(LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastSyncScheduledAt); - } - if (lastSyncStatus != null) { - builder.field(LAST_SYNC_STATUS_FIELD.getPreferredName(), lastSyncStatus); - } - if (lastSynced != null) { - builder.field(LAST_SYNCED_FIELD.getPreferredName(), lastSynced); - } - + builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); + builder.field(LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastSyncScheduledAt); + builder.field(LAST_SYNC_STATUS_FIELD.getPreferredName(), lastSyncStatus); + builder.field(LAST_SYNCED_FIELD.getPreferredName(), lastSynced); return builder; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java new file mode 100644 index 0000000000000..8e373ce48caf3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorLastSyncStatsAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_last_sync_stats_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_last_sync")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorLastSyncStatsAction.Request request = UpdateConnectorLastSyncStatsAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorLastSyncStatsAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorLastSyncStatsAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java new file mode 100644 index 0000000000000..9ec0105668fbc --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorLastSyncStatsAction extends HandledTransportAction< + UpdateConnectorLastSyncStatsAction.Request, + UpdateConnectorLastSyncStatsAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorLastSyncStatsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorLastSyncStatsAction.NAME, + transportService, + actionFilters, + UpdateConnectorLastSyncStatsAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorLastSyncStatsAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorLastSyncStats( + request, + listener.map(r -> new 
UpdateConnectorLastSyncStatsAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java new file mode 100644 index 0000000000000..328831cf0b840 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java @@ -0,0 +1,240 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.ConnectorSyncInfo; +import 
org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; + +import java.io.IOException; +import java.time.Instant; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class UpdateConnectorLastSyncStatsAction extends ActionType { + + public static final UpdateConnectorLastSyncStatsAction INSTANCE = new UpdateConnectorLastSyncStatsAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_last_sync_stats"; + + public UpdateConnectorLastSyncStatsAction() { + super(NAME, UpdateConnectorLastSyncStatsAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + private final ConnectorSyncInfo syncInfo; + + public Request(String connectorId, ConnectorSyncInfo syncInfo) { + this.connectorId = connectorId; + this.syncInfo = syncInfo; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.syncInfo = in.readOptionalWriteable(ConnectorSyncInfo::new); + } + + public String getConnectorId() { + return connectorId; + } + + public ConnectorSyncInfo getSyncInfo() { + return syncInfo; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("connector_update_last_sync_stats_request", false, ((args, connectorId) -> { + int i = 0; + return new UpdateConnectorLastSyncStatsAction.Request( + connectorId, + new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) + 
.setLastAccessControlSyncScheduledAt((Instant) args[i++]) + .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastDeletedDocumentCount((Long) args[i++]) + .setLastIncrementalSyncScheduledAt((Instant) args[i++]) + .setLastIndexedDocumentCount((Long) args[i++]) + .setLastSyncError((String) args[i++]) + .setLastSyncScheduledAt((Instant) args[i++]) + .setLastSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastSynced((Instant) args[i++]) + .build() + ); + })); + + static { + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), + ConnectorSyncInfo.LAST_SYNC_STATUS_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SYNCED_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + } + + public static UpdateConnectorLastSyncStatsAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorLastSyncStatsAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorLastSyncStatsAction.Request fromXContent(XContentParser parser, String connectorId) + throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + syncInfo.toXContent(builder, params); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalWriteable(syncInfo); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(syncInfo, request.syncInfo); + } + + 
@Override + public int hashCode() { + return Objects.hash(connectorId, syncInfo); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index c93135942348a..e155cdfefbfa1 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; 
+import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -131,6 +132,27 @@ public void testUpdateConnectorLastSeen() throws Exception { } + public void testUpdateConnectorLastSyncStats() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); + + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request( + connector.getConnectorId(), + syncStats + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + + assertThat(syncStats, equalTo(indexedConnector.getSyncInfo())); + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); @@ -302,6 +324,32 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request updateLastSyncStats) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorLastSyncStats(updateLastSyncStats, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + 
latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update last sync stats request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update last sync stats request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorPipeline(UpdateConnectorPipelineAction.Request updatePipeline) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index a0cf018142599..98d0112d8910f 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -70,9 +70,9 @@ public static ConnectorSyncInfo getRandomConnectorSyncInfo() { return new ConnectorSyncInfo.Builder().setLastAccessControlSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastAccessControlSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastAccessControlSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) - .setLastDeletedDocumentCount(randomFrom(new Long[] { null, randomLong() })) + .setLastDeletedDocumentCount(randomLong()) .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) - .setLastIndexedDocumentCount(randomFrom(new Long[] { null, randomLong() })) + .setLastIndexedDocumentCount(randomLong()) .setLastSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) 
.setLastSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..0728a7b328eb4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorLastSyncStatsAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSyncStatsAction.Request::new; + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorLastSyncStatsAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorSyncInfo()); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request mutateInstance(UpdateConnectorLastSyncStatsAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorLastSyncStatsAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request mutateInstanceForVersion( + UpdateConnectorLastSyncStatsAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java new file mode 100644 index 
0000000000000..dd214e10699ef --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorLastSyncStatsAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSyncStatsAction.Response::new; + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Response createTestInstance() { + return new UpdateConnectorLastSyncStatsAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Response mutateInstance(UpdateConnectorLastSyncStatsAction.Response instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Response mutateInstanceForVersion( + UpdateConnectorLastSyncStatsAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java 
b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index aa6c67798e3e3..5412e7d05f27f 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -129,6 +129,7 @@ public class Constants { "cluster:admin/xpack/connector/put", "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", + "cluster:admin/xpack/connector/update_last_sync_stats", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/connector/sync_job/post", From 5a4d4b3c01195c99d6013bce31af7aeaa6693f4d Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 4 Dec 2023 09:13:42 +0100 Subject: [PATCH 110/181] Remove outdated spec (#102845) This spec can be removed as 7.15 was released --- .../test/indices.recovery/10_basic.yml | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml index ac3f379fb86cd..d7731c0073140 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml @@ -1,8 +1,5 @@ --- "Indices recovery test": - - skip: - version: " - 7.14.99" - reason: recovery from snapshot bytes not available until 7.15 - do: indices.create: @@ -45,47 +42,6 @@ - gte: { test_1.shards.0.verify_index.total_time_in_millis: 0 } --- -"Indices recovery test without recovery from snapshot": -# to be removed once 7.15 is out. 
- - do: - indices.create: - index: test_1 - body: - settings: - index: - number_of_replicas: 0 - - - do: - cluster.health: - wait_for_status: green - - - do: - indices.recovery: - index: [test_1] - human: true - - - match: { test_1.shards.0.type: "EMPTY_STORE" } - - match: { test_1.shards.0.stage: "DONE" } - - match: { test_1.shards.0.primary: true } - - match: { test_1.shards.0.start_time: /^2\d\d\d-.+/ } - - match: { test_1.shards.0.target.ip: /^\d+\.\d+\.\d+\.\d+$/ } - - gte: { test_1.shards.0.index.files.total: 0 } - - gte: { test_1.shards.0.index.files.reused: 0 } - - gte: { test_1.shards.0.index.files.recovered: 0 } - - match: { test_1.shards.0.index.files.percent: /^\d+\.\d\%$/ } - - gte: { test_1.shards.0.index.size.total_in_bytes: 0 } - - gte: { test_1.shards.0.index.size.reused_in_bytes: 0 } - - gte: { test_1.shards.0.index.size.recovered_in_bytes: 0 } - - match: { test_1.shards.0.index.size.percent: /^\d+\.\d\%$/ } - - gte: { test_1.shards.0.index.source_throttle_time_in_millis: 0 } - - gte: { test_1.shards.0.index.target_throttle_time_in_millis: 0 } - - gte: { test_1.shards.0.translog.recovered: 0 } - - gte: { test_1.shards.0.translog.total: -1 } - - gte: { test_1.shards.0.translog.total_on_start: 0 } - - gte: { test_1.shards.0.translog.total_time_in_millis: 0 } - - gte: { test_1.shards.0.verify_index.check_index_time_in_millis: 0 } - - gte: { test_1.shards.0.verify_index.total_time_in_millis: 0 } ---- "Indices recovery test for closed index": - skip: version: " - 7.1.99" From aedbe683ae93116923672866d1bb635567a5f861 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Dec 2023 08:30:22 +0000 Subject: [PATCH 111/181] Fail S3 repository analysis on partial reads (#102840) Today when reading a blob from a S3 repository we will resume a download on a partial success. If this happens concurrently with a blob overwrite then we may resume the download against a blob with updated contents, which causes a checksum mismatch. 
A checksum mismatch during an overwrite suggests an atomicity failure, which can be misleading to users. With this commit we consider partial downloads during repository analysis as immediate errors instead, clarifying the repository problem. Relates #101100 --- docs/changelog/102840.yaml | 5 +++ .../s3/S3RetryingInputStream.java | 6 +++ .../s3/S3BlobContainerRetriesTests.java | 45 ++++++++++++++++++- .../AbstractBlobContainerRetriesTestCase.java | 17 ++++--- 4 files changed, 66 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/102840.yaml diff --git a/docs/changelog/102840.yaml b/docs/changelog/102840.yaml new file mode 100644 index 0000000000000..1d87cede632c9 --- /dev/null +++ b/docs/changelog/102840.yaml @@ -0,0 +1,5 @@ +pr: 102840 +summary: Fail S3 repository analysis on partial reads +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 6cad60f32de47..04eadba9f9f8f 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -166,6 +166,12 @@ private void ensureOpen() { } private void reopenStreamOrFail(IOException e) throws IOException { + if (purpose == OperationPurpose.REPOSITORY_ANALYSIS) { + logger.warn(() -> format(""" + failed reading [%s/%s] at offset [%s]""", blobStore.bucket(), blobKey, start + currentOffset), e); + throw e; + } + final int maxAttempts = blobStore.getMaxRetries() + 1; final long meaningfulProgressSize = Math.max(1L, blobStore.bufferSizeInBytes() / 100L); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java 
b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index a8a6d71928795..9ed68976aac8a 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -14,6 +14,7 @@ import com.sun.net.httpserver.HttpHandler; import org.apache.http.HttpStatus; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; @@ -36,6 +37,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.blobstore.AbstractBlobContainerRetriesTestCase; +import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.hamcrest.Matcher; import org.junit.After; @@ -519,7 +521,7 @@ public void handle(HttpExchange exchange) throws IOException { httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_max_retries"), new FlakyReadHandler()); - try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomRetryingPurpose(), "read_blob_max_retries")) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -535,12 +537,53 @@ public void handle(HttpExchange exchange) throws IOException { } } + public void testReadDoesNotRetryForRepositoryAnalysis() { + final int maxRetries = between(0, 5); + final int bufferSizeBytes = scaledRandomIntBetween( + 0, + randomFrom(1000, Math.toIntExact(S3Repository.BUFFER_SIZE_SETTING.get(Settings.EMPTY).getBytes())) + ); + final BlobContainer blobContainer = createBlobContainer(maxRetries, null, true, 
ByteSizeValue.ofBytes(bufferSizeBytes)); + + final byte[] bytes = randomBlobContent(); + + @SuppressForbidden(reason = "use a http server") + class FlakyReadHandler implements HttpHandler { + private int failureCount; + + @Override + public void handle(HttpExchange exchange) throws IOException { + if (failureCount != 0) { + ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError("failureCount=" + failureCount)); + } + failureCount += 1; + Streams.readFully(exchange.getRequestBody()); + sendIncompleteContent(exchange, bytes); + exchange.close(); + } + } + + httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_repo_analysis"), new FlakyReadHandler()); + + expectThrows(Exception.class, () -> { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.REPOSITORY_ANALYSIS, "read_blob_repo_analysis")) { + final byte[] bytesRead = BytesReference.toBytes(Streams.readFully(inputStream)); + assertArrayEquals(Arrays.copyOfRange(bytes, 0, bytes.length), bytesRead); + } + }); + } + @Override protected Matcher getMaxRetriesMatcher(int maxRetries) { // some attempts make meaningful progress and do not count towards the max retry limit return allOf(greaterThanOrEqualTo(maxRetries), lessThanOrEqualTo(S3RetryingInputStream.MAX_SUPPRESSED_EXCEPTIONS)); } + @Override + protected OperationPurpose randomRetryingPurpose() { + return randomValueOtherThan(OperationPurpose.REPOSITORY_ANALYSIS, BlobStoreTestUtil::randomPurpose); + } + /** * Asserts that an InputStream is fully consumed, or aborted, when it is closed */ diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index 4110472e8ef76..8d44c37fcd9f1 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -14,6 +14,7 @@ import org.apache.http.ConnectionClosedException; import org.apache.http.HttpStatus; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.unit.ByteSizeValue; @@ -146,7 +147,7 @@ public void testReadBlobWithRetries() throws Exception { } }); - try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomRetryingPurpose(), "read_blob_max_retries")) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -212,7 +213,7 @@ public void testReadRangeBlobWithRetries() throws Exception { final int position = randomIntBetween(0, bytes.length - 1); final int length = randomIntBetween(0, randomBoolean() ? bytes.length : Integer.MAX_VALUE); - try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_range_blob_max_retries", position, length)) { + try (InputStream inputStream = blobContainer.readBlob(randomRetryingPurpose(), "read_range_blob_max_retries", position, length)) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -269,8 +270,8 @@ public void testReadBlobWithReadTimeouts() { exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(randomPurpose(), "read_blob_incomplete") - : blobContainer.readBlob(randomPurpose(), "read_blob_incomplete", position, length) + ? 
blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete") + : blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete", position, length) ) { Streams.readFully(stream); } @@ -289,6 +290,10 @@ protected org.hamcrest.Matcher getMaxRetriesMatcher(int maxRetries) { return equalTo(maxRetries); } + protected OperationPurpose randomRetryingPurpose() { + return randomPurpose(); + } + public void testReadBlobWithNoHttpResponse() { final TimeValue readTimeout = TimeValue.timeValueMillis(between(100, 200)); final BlobContainer blobContainer = createBlobContainer(randomInt(5), readTimeout, null, null); @@ -323,8 +328,8 @@ public void testReadBlobWithPrematureConnectionClose() { final Exception exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(randomPurpose(), "read_blob_incomplete", 0, 1) - : blobContainer.readBlob(randomPurpose(), "read_blob_incomplete") + ? blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete", 0, 1) + : blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete") ) { Streams.readFully(stream); } From d73f1ac4837539ce749dd81eaa51af4f552dea56 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Dec 2023 11:39:17 +0000 Subject: [PATCH 112/181] AwaitsFix for #102920 --- .../action/admin/cluster/stats/SearchUsageStatsTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java index 10419719a5ed1..cc4509500f9c1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.apache.lucene.tests.util.LuceneTestCase; import 
org.elasticsearch.TransportVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; @@ -19,6 +20,7 @@ import java.util.List; import java.util.Map; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102920") // failing test is final, mute whole suite public class SearchUsageStatsTests extends AbstractWireSerializingTestCase { private static final List QUERY_TYPES = List.of( From e1fceae5e4c51233415c21049e3b45a3983e8ec2 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Dec 2023 12:31:40 +0000 Subject: [PATCH 113/181] Distinguish blob store ops for data and metadata (#102836) Today all snapshot operations have an operation purpose of `SNAPSHOT`, but in fact some repository implementations may want to configure operations against snapshot metadata differently from those against snapshot data. This commit divides the purpose into `SNAPSHOT_DATA` and `SNAPSHOT_METADATA` to support this distinction. Relates https://github.com/elastic/elasticsearch/issues/81352 --- .../s3/S3BlobStoreRepositoryTests.java | 35 +-- .../repositories/s3/S3BlobContainer.java | 3 + .../s3/S3BlobContainerRetriesTests.java | 3 +- .../BlobStoreRepositoryCleanupIT.java | 6 +- ...BlobStoreRepositoryOperationPurposeIT.java | 243 ++++++++++++++++++ .../common/blobstore/BlobContainer.java | 31 +++ .../common/blobstore/OperationPurpose.java | 3 +- .../common/blobstore/fs/FsBlobContainer.java | 6 + .../recovery/SnapshotFilesProvider.java | 2 +- .../blobstore/BlobStoreRepository.java | 67 +++-- .../blobstore/ChecksumBlobStoreFormat.java | 4 +- .../blobstore/fs/FsBlobContainerTests.java | 12 +- .../blobstore/BlobStoreRepositoryTests.java | 4 +- .../snapshots/SnapshotResiliencyTests.java | 6 +- .../AbstractThirdPartyRepositoryTestCase.java | 7 +- .../blobstore/BlobStoreTestUtil.java | 9 +- .../ESBlobStoreRepositoryIntegTestCase.java | 6 +- .../input/DirectBlobContainerIndexInput.java | 2 +- 
.../input/MetadataCachingIndexInput.java | 4 +- 19 files changed, 386 insertions(+), 67 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 5a445a1524da5..c76364f48c081 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -75,7 +75,7 @@ import java.util.stream.StreamSupport; import static org.elasticsearch.repositories.RepositoriesModule.METRIC_REQUESTS_COUNT; -import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.allOf; @@ -85,8 +85,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; @@ -271,8 +269,12 @@ public void testMetrics() throws Exception { final List metrics = Measurement.combine(plugins.get(0).getLongCounterMeasurement(METRIC_REQUESTS_COUNT)); assertThat( - statsCollectors.size(), - equalTo(metrics.stream().map(m -> 
m.attributes().get("operation")).collect(Collectors.toSet()).size()) + statsCollectors.keySet().stream().map(S3BlobStore.StatsKey::operation).collect(Collectors.toSet()), + equalTo( + metrics.stream() + .map(m -> S3BlobStore.Operation.parse((String) m.attributes().get("operation"))) + .collect(Collectors.toSet()) + ) ); metrics.forEach(metric -> { assertThat( @@ -303,23 +305,24 @@ public void testRequestStatsWithOperationPurposes() throws IOException { final String repoName = createRepository(randomRepositoryName()); final RepositoriesService repositoriesService = internalCluster().getCurrentMasterNodeInstance(RepositoriesService.class); final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repoName); - final BlobStore blobStore = repository.blobStore(); - assertThat(blobStore, instanceOf(BlobStoreWrapper.class)); - final BlobStore delegateBlobStore = ((BlobStoreWrapper) blobStore).delegate(); - assertThat(delegateBlobStore, instanceOf(S3BlobStore.class)); - final S3BlobStore.StatsCollectors statsCollectors = ((S3BlobStore) delegateBlobStore).getStatsCollectors(); + final BlobStoreWrapper blobStore = asInstanceOf(BlobStoreWrapper.class, repository.blobStore()); + final S3BlobStore delegateBlobStore = asInstanceOf(S3BlobStore.class, blobStore.delegate()); + final S3BlobStore.StatsCollectors statsCollectors = delegateBlobStore.getStatsCollectors(); - // Initial stats are collected with the default operation purpose + // Initial stats are collected for repository verification, which counts as SNAPSHOT_METADATA final Set allOperations = EnumSet.allOf(S3BlobStore.Operation.class) .stream() .map(S3BlobStore.Operation::getKey) .collect(Collectors.toUnmodifiableSet()); - statsCollectors.collectors.keySet().forEach(statsKey -> assertThat(statsKey.purpose(), is(OperationPurpose.SNAPSHOT))); + assertThat( + statsCollectors.collectors.keySet().stream().map(S3BlobStore.StatsKey::purpose).collect(Collectors.toUnmodifiableSet()), + 
equalTo(Set.of(OperationPurpose.SNAPSHOT_METADATA)) + ); final Map initialStats = blobStore.stats(); assertThat(initialStats.keySet(), equalTo(allOperations)); // Collect more stats with an operation purpose other than the default - final OperationPurpose purpose = randomValueOtherThan(OperationPurpose.SNAPSHOT, BlobStoreTestUtil::randomPurpose); + final OperationPurpose purpose = randomValueOtherThan(OperationPurpose.SNAPSHOT_METADATA, BlobStoreTestUtil::randomPurpose); final BlobPath blobPath = repository.basePath().add(randomAlphaOfLength(10)); final BlobContainer blobContainer = blobStore.blobContainer(blobPath); final BytesArray whatToWrite = new BytesArray(randomByteArrayOfLength(randomIntBetween(100, 1000))); @@ -332,7 +335,7 @@ public void testRequestStatsWithOperationPurposes() throws IOException { // Internal stats collection is fine-grained and records different purposes assertThat( statsCollectors.collectors.keySet().stream().map(S3BlobStore.StatsKey::purpose).collect(Collectors.toUnmodifiableSet()), - equalTo(Set.of(OperationPurpose.SNAPSHOT, purpose)) + equalTo(Set.of(OperationPurpose.SNAPSHOT_METADATA, purpose)) ); // The stats report aggregates over different purposes final Map newStats = blobStore.stats(); @@ -341,7 +344,7 @@ public void testRequestStatsWithOperationPurposes() throws IOException { final Set operationsSeenForTheNewPurpose = statsCollectors.collectors.keySet() .stream() - .filter(sk -> sk.purpose() != OperationPurpose.SNAPSHOT) + .filter(sk -> sk.purpose() != OperationPurpose.SNAPSHOT_METADATA) .map(sk -> sk.operation().getKey()) .collect(Collectors.toUnmodifiableSet()); @@ -396,7 +399,7 @@ public void testEnforcedCooldownPeriod() throws IOException { () -> repository.blobStore() .blobContainer(repository.basePath()) .writeBlobAtomic( - randomPurpose(), + randomNonDataPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + modifiedRepositoryData.getGenId(), serialized, true diff --git 
a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 87b3c17bfd91c..93b8ef7e57389 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -129,6 +129,7 @@ public long readBlobPreferredLength() { @Override public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); assert inputStream.markSupported() : "No mark support on inputStream breaks the S3 SDK's ability to retry requests"; SocketAccess.doPrivilegedIOException(() -> { if (blobSize <= getLargeBlobThresholdInBytes()) { @@ -148,6 +149,7 @@ public void writeMetadataBlob( boolean atomic, CheckedConsumer writer ) throws IOException { + assert purpose != OperationPurpose.SNAPSHOT_DATA && BlobContainer.assertPurposeConsistency(purpose, blobName) : purpose; final String absoluteBlobKey = buildKey(blobName); try ( AmazonS3Reference clientReference = blobStore.clientReference(); @@ -273,6 +275,7 @@ long getLargeBlobThresholdInBytes() { @Override public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); writeBlob(purpose, blobName, bytes, failIfAlreadyExists); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 9ed68976aac8a..b4b136338923f 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ 
b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -57,6 +57,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.s3.S3ClientSettings.DISABLE_CHUNKED_ENCODING; import static org.elasticsearch.repositories.s3.S3ClientSettings.ENDPOINT_SETTING; @@ -446,7 +447,7 @@ public void testWriteLargeBlobStreaming() throws Exception { } }); - blobContainer.writeMetadataBlob(randomPurpose(), "write_large_blob_streaming", false, randomBoolean(), out -> { + blobContainer.writeMetadataBlob(randomNonDataPurpose(), "write_large_blob_streaming", false, randomBoolean(), out -> { final byte[] buffer = new byte[16 * 1024]; long outstanding = blobSize; while (outstanding > 0) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java index 7886e628b26ad..bf937a9d57f02 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFutureThrows; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -98,7 +98,7 @@ 
private ActionFuture startBlockedCleanup(String repoN garbageFuture, () -> repository.blobStore() .blobContainer(repository.basePath()) - .writeBlob(randomPurpose(), "snap-foo.dat", new BytesArray(new byte[1]), true) + .writeBlob(randomNonDataPurpose(), "snap-foo.dat", new BytesArray(new byte[1]), true) ) ); garbageFuture.get(); @@ -147,7 +147,7 @@ public void testCleanupOldIndexN() throws ExecutionException, InterruptedExcepti () -> repository.blobStore() .blobContainer(repository.basePath()) .writeBlob( - randomPurpose(), + randomNonDataPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + generation, new BytesArray(new byte[1]), true diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java new file mode 100644 index 0000000000000..91eb1dc6eb01b --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java @@ -0,0 +1,243 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.repositories.blobstore; + +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.support.BlobMetadata; +import org.elasticsearch.common.blobstore.support.FilterBlobContainer; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.xcontent.NamedXContentRegistry; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; + +public class BlobStoreRepositoryOperationPurposeIT extends AbstractSnapshotIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), TestPlugin.class); + } + + public void testSnapshotOperationPurposes() throws Exception { + // Perform some simple operations on the repository in order to exercise the checks that the purpose is set correctly for 
various + // operations + + final var repoName = randomIdentifier(); + createRepository(repoName, TestPlugin.ASSERTING_REPO_TYPE); + + final var count = between(1, 3); + + for (int i = 0; i < count; i++) { + createIndexWithContent("index-" + i); + createFullSnapshot(repoName, "snap-" + i); + } + + final var timeout = TimeValue.timeValueSeconds(10); + clusterAdmin().prepareCleanupRepository(repoName).get(timeout); + clusterAdmin().prepareCloneSnapshot(repoName, "snap-0", "clone-0").setIndices("index-0").get(timeout); + + // restart to ensure that the reads which happen when starting a node on a nonempty repository use the expected purposes + internalCluster().fullRestart(); + + clusterAdmin().prepareGetSnapshots(repoName).get(timeout); + + clusterAdmin().prepareRestoreSnapshot(repoName, "clone-0") + .setRenamePattern("index-0") + .setRenameReplacement("restored-0") + .setWaitForCompletion(true) + .get(timeout); + + for (int i = 0; i < count; i++) { + assertTrue(startDeleteSnapshot(repoName, "snap-" + i).get(10, TimeUnit.SECONDS).isAcknowledged()); + } + + clusterAdmin().prepareDeleteRepository(repoName).get(timeout); + } + + public static class TestPlugin extends Plugin implements RepositoryPlugin { + static final String ASSERTING_REPO_TYPE = "asserting"; + + @Override + public Map getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + return Map.of( + ASSERTING_REPO_TYPE, + metadata -> new AssertingRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) + ); + } + } + + private static class AssertingRepository extends FsRepository { + AssertingRepository( + RepositoryMetadata metadata, + Environment environment, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + super(metadata, environment, 
namedXContentRegistry, clusterService, bigArrays, recoverySettings); + } + + @Override + protected BlobStore createBlobStore() throws Exception { + return new AssertingBlobStore(super.createBlobStore()); + } + } + + private static class AssertingBlobStore implements BlobStore { + private final BlobStore delegateBlobStore; + + AssertingBlobStore(BlobStore delegateBlobStore) { + this.delegateBlobStore = delegateBlobStore; + } + + @Override + public BlobContainer blobContainer(BlobPath path) { + return new AssertingBlobContainer(delegateBlobStore.blobContainer(path)); + } + + @Override + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + delegateBlobStore.deleteBlobsIgnoringIfNotExists(purpose, blobNames); + } + + @Override + public void close() throws IOException { + delegateBlobStore.close(); + } + } + + private static class AssertingBlobContainer extends FilterBlobContainer { + + AssertingBlobContainer(BlobContainer delegate) { + super(delegate); + } + + @Override + protected BlobContainer wrapChild(BlobContainer child) { + return new AssertingBlobContainer(child); + } + + @Override + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + assertPurposeConsistency(purpose, blobName); + super.writeBlob(purpose, blobName, bytes, failIfAlreadyExists); + } + + @Override + public void writeBlob( + OperationPurpose purpose, + String blobName, + InputStream inputStream, + long blobSize, + boolean failIfAlreadyExists + ) throws IOException { + assertPurposeConsistency(purpose, blobName); + super.writeBlob(purpose, blobName, inputStream, blobSize, failIfAlreadyExists); + } + + @Override + public void writeMetadataBlob( + OperationPurpose purpose, + String blobName, + boolean failIfAlreadyExists, + boolean atomic, + CheckedConsumer writer + ) throws IOException { + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, 
purpose); + assertPurposeConsistency(purpose, blobName); + super.writeMetadataBlob(purpose, blobName, failIfAlreadyExists, atomic, writer); + } + + @Override + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, purpose); + assertPurposeConsistency(purpose, blobName); + super.writeBlobAtomic(purpose, blobName, bytes, failIfAlreadyExists); + } + + @Override + public boolean blobExists(OperationPurpose purpose, String blobName) throws IOException { + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, purpose); + assertPurposeConsistency(purpose, blobName); + return super.blobExists(purpose, blobName); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + assertPurposeConsistency(purpose, blobName); + return super.readBlob(purpose, blobName); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + assertPurposeConsistency(purpose, blobName); + return super.readBlob(purpose, blobName, position, length); + } + + @Override + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { + assertEquals(OperationPurpose.SNAPSHOT_METADATA, purpose); + return super.listBlobsByPrefix(purpose, blobNamePrefix); + } + } + + private static void assertPurposeConsistency(OperationPurpose purpose, String blobName) { + if (blobName.startsWith(BlobStoreRepository.UPLOADED_DATA_BLOB_PREFIX)) { + assertEquals(blobName, OperationPurpose.SNAPSHOT_DATA, purpose); + } else { + assertThat( + blobName, + anyOf( + startsWith(BlobStoreRepository.INDEX_FILE_PREFIX), + startsWith(BlobStoreRepository.METADATA_PREFIX), + startsWith(BlobStoreRepository.SNAPSHOT_PREFIX), + equalTo(BlobStoreRepository.INDEX_LATEST_BLOB), + // verification + equalTo("master.dat"), 
+ startsWith("data-") + ) + ); + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, purpose); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index c832f222ecc69..77c225f5d94cb 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; import java.io.InputStream; @@ -116,6 +117,7 @@ void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStrea */ default void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assert assertPurposeConsistency(purpose, blobName); writeBlob(purpose, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); } @@ -261,4 +263,33 @@ default void getRegister(OperationPurpose purpose, String key, ActionListener + *
  • {@link OperationPurpose#SNAPSHOT_DATA} is not used for blobs that look like metadata blobs.
  • + *
  • {@link OperationPurpose#SNAPSHOT_METADATA} is not used for blobs that look like data blobs.
  • + * + */ + // This is fairly lenient because we use a wide variety of blob names and purposes in tests in order to get good coverage. See + // BlobStoreRepositoryOperationPurposeIT for some stricter checks which apply during genuine snapshot operations. + static boolean assertPurposeConsistency(OperationPurpose purpose, String blobName) { + switch (purpose) { + case SNAPSHOT_DATA -> { + // must not be used for blobs with names that look like metadata blobs + assert (blobName.startsWith(BlobStoreRepository.INDEX_FILE_PREFIX) + || blobName.startsWith(BlobStoreRepository.METADATA_PREFIX) + || blobName.startsWith(BlobStoreRepository.SNAPSHOT_PREFIX) + || blobName.equals(BlobStoreRepository.INDEX_LATEST_BLOB)) == false : blobName + " should not use purpose " + purpose; + } + case SNAPSHOT_METADATA -> { + // must not be used for blobs with names that look like data blobs + assert blobName.startsWith(BlobStoreRepository.UPLOADED_DATA_BLOB_PREFIX) == false + : blobName + " should not use purpose " + purpose; + } + case REPOSITORY_ANALYSIS, CLUSTER_STATE, INDICES, TRANSLOG -> { + // no specific requirements + } + } + return true; + } } diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java index 568f2968c9e61..5df17c1948870 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java @@ -15,7 +15,8 @@ * as well as other things that requires further differentiation for the same blob operation. 
*/ public enum OperationPurpose { - SNAPSHOT("Snapshot"), + SNAPSHOT_DATA("SnapshotData"), + SNAPSHOT_METADATA("SnapshotMetadata"), REPOSITORY_ANALYSIS("RepositoryAnalysis"), CLUSTER_STATE("ClusterState"), INDICES("Indices"), diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index 9f2971e24cbf3..e40ca70460b13 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -183,6 +183,7 @@ public boolean blobExists(OperationPurpose purpose, String blobName) { @Override public InputStream readBlob(OperationPurpose purpose, String name) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, name); final Path resolvedPath = path.resolve(name); try { return Files.newInputStream(resolvedPath); @@ -193,6 +194,7 @@ public InputStream readBlob(OperationPurpose purpose, String name) throws IOExce @Override public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); final SeekableByteChannel channel = Files.newByteChannel(path.resolve(blobName)); if (position > 0L) { channel.position(position); @@ -210,6 +212,7 @@ public long readBlobPreferredLength() { @Override public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); final Path file = path.resolve(blobName); try { writeToPath(inputStream, file, blobSize); @@ -225,6 +228,7 @@ public void writeBlob(OperationPurpose purpose, String blobName, InputStream inp @Override public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean 
failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); final Path file = path.resolve(blobName); try { writeToPath(bytes, file); @@ -246,6 +250,7 @@ public void writeMetadataBlob( boolean atomic, CheckedConsumer writer ) throws IOException { + assert purpose != OperationPurpose.SNAPSHOT_DATA && BlobContainer.assertPurposeConsistency(purpose, blobName) : purpose; if (atomic) { final String tempBlob = tempBlobName(blobName); try { @@ -291,6 +296,7 @@ private void writeToPath( @Override public void writeBlobAtomic(OperationPurpose purpose, final String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assert purpose != OperationPurpose.SNAPSHOT_DATA && BlobContainer.assertPurposeConsistency(purpose, blobName) : purpose; final String tempBlob = tempBlobName(blobName); final Path tempBlobPath = path.resolve(tempBlob); try { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java index daf9a809dcf07..1424ef160657b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java @@ -50,7 +50,7 @@ public InputStream getInputStreamForSnapshotFile( inputStream = new SlicedInputStream(fileInfo.numberOfParts()) { @Override protected InputStream openSlice(int slice) throws IOException { - return container.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(slice)); + return container.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(slice)); } }; } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index cd2b8c73fe90b..c45a048480383 100644 --- 
a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -513,7 +513,7 @@ public void cloneShardSnapshot( final ShardGeneration existingShardGen; if (shardGeneration == null) { Tuple tuple = buildBlobStoreIndexShardSnapshots( - shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(), + shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet(), shardContainer ); existingShardGen = new ShardGeneration(tuple.v2()); @@ -883,7 +883,7 @@ private void createSnapshotsDeletion( listener.onFailure(new RepositoryException(metadata.name(), "repository is readonly")); } else { threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.supply(listener, () -> { - final var originalRootBlobs = blobContainer().listBlobs(OperationPurpose.SNAPSHOT); + final var originalRootBlobs = blobContainer().listBlobs(OperationPurpose.SNAPSHOT_METADATA); // One final best-effort check for other clusters concurrently writing to the repository: final var originalRepositoryData = safeRepositoryData(repositoryDataGeneration, originalRootBlobs); @@ -893,7 +893,7 @@ private void createSnapshotsDeletion( repositoryDataGeneration, SnapshotsService.minCompatibleVersion(minimumNodeVersion, originalRepositoryData, snapshotIds), originalRootBlobs, - blobStore().blobContainer(indicesPath()).children(OperationPurpose.SNAPSHOT), + blobStore().blobContainer(indicesPath()).children(OperationPurpose.SNAPSHOT_DATA), originalRepositoryData ); })); @@ -1243,7 +1243,7 @@ private class ShardSnapshotsDeletion extends AbstractRunnable { @Override protected void doRun() throws Exception { shardContainer = shardContainer(indexId, shardId); - originalShardBlobs = shardContainer.listBlobs(OperationPurpose.SNAPSHOT).keySet(); + originalShardBlobs = shardContainer.listBlobs(OperationPurpose.SNAPSHOT_DATA).keySet(); 
final BlobStoreIndexShardSnapshots blobStoreIndexShardSnapshots; final long newGen; if (useShardGenerations) { @@ -1380,7 +1380,7 @@ private void cleanupUnlinkedShardLevelBlobs( } snapshotExecutor.execute(ActionRunnable.wrap(listener, l -> { try { - deleteFromContainer(blobContainer(), filesToDelete); + deleteFromContainer(OperationPurpose.SNAPSHOT_DATA, blobContainer(), filesToDelete); l.onResponse(null); } catch (Exception e) { logger.warn(() -> format("%s Failed to delete some blobs during snapshot delete", snapshotIds), e); @@ -1425,7 +1425,7 @@ private void cleanupUnlinkedRootAndIndicesBlobs(RepositoryData newRepositoryData staleBlobDeleteRunner.enqueueTask(listeners.acquire(ref -> { try (ref) { logStaleRootLevelBlobs(newRepositoryData.getGenId() - 1, snapshotIds, staleRootBlobs); - deleteFromContainer(blobContainer(), staleRootBlobs.iterator()); + deleteFromContainer(OperationPurpose.SNAPSHOT_METADATA, blobContainer(), staleRootBlobs.iterator()); for (final var staleRootBlob : staleRootBlobs) { bytesDeleted.addAndGet(originalRootBlobs.get(staleRootBlob).length()); } @@ -1456,7 +1456,7 @@ private void cleanupUnlinkedRootAndIndicesBlobs(RepositoryData newRepositoryData staleBlobDeleteRunner.enqueueTask(listeners.acquire(ref -> { try (ref) { logger.debug("[{}] Found stale index [{}]. 
Cleaning it up", metadata.name(), indexId); - final var deleteResult = indexEntry.getValue().delete(OperationPurpose.SNAPSHOT); + final var deleteResult = indexEntry.getValue().delete(OperationPurpose.SNAPSHOT_DATA); blobsDeleted.addAndGet(deleteResult.blobsDeleted()); bytesDeleted.addAndGet(deleteResult.bytesDeleted()); logger.debug("[{}] Cleaned up stale index [{}]", metadata.name(), indexId); @@ -1757,7 +1757,7 @@ private void cleanupOldMetadata( threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { - deleteFromContainer(blobContainer(), toDelete.iterator()); + deleteFromContainer(OperationPurpose.SNAPSHOT_METADATA, blobContainer(), toDelete.iterator()); } @Override @@ -1854,7 +1854,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna } } - private void deleteFromContainer(BlobContainer container, Iterator blobs) throws IOException { + private void deleteFromContainer(OperationPurpose purpose, BlobContainer container, Iterator blobs) throws IOException { final Iterator wrappedIterator; if (logger.isTraceEnabled()) { wrappedIterator = new Iterator<>() { @@ -1873,7 +1873,7 @@ public String next() { } else { wrappedIterator = blobs; } - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, wrappedIterator); + container.deleteBlobsIgnoringIfNotExists(purpose, wrappedIterator); } private BlobPath indicesPath() { @@ -2001,7 +2001,7 @@ public String startVerification() { String seed = UUIDs.randomBase64UUID(); byte[] testBytes = Strings.toUTF8Bytes(seed); BlobContainer testContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); - testContainer.writeBlobAtomic(OperationPurpose.SNAPSHOT, "master.dat", new BytesArray(testBytes), true); + testContainer.writeBlobAtomic(OperationPurpose.SNAPSHOT_METADATA, "master.dat", new BytesArray(testBytes), true); return seed; } } catch (Exception exp) { @@ -2014,7 +2014,7 @@ public void 
endVerification(String seed) { if (isReadOnly() == false) { try { final String testPrefix = testBlobPrefix(seed); - blobStore().blobContainer(basePath().add(testPrefix)).delete(OperationPurpose.SNAPSHOT); + blobStore().blobContainer(basePath().add(testPrefix)).delete(OperationPurpose.SNAPSHOT_METADATA); } catch (Exception exp) { throw new RepositoryVerificationException(metadata.name(), "cannot delete test data at " + basePath(), exp); } @@ -2434,7 +2434,7 @@ private RepositoryData getRepositoryData(long indexGen) { // EMPTY is safe here because RepositoryData#fromXContent calls namedObject try ( - InputStream blob = blobContainer().readBlob(OperationPurpose.SNAPSHOT, snapshotsIndexBlobName); + InputStream blob = blobContainer().readBlob(OperationPurpose.SNAPSHOT_METADATA, snapshotsIndexBlobName); XContentParser parser = XContentType.JSON.xContent() .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, blob) ) { @@ -2660,7 +2660,7 @@ public void onFailure(Exception e) { } final String indexBlob = INDEX_FILE_PREFIX + newGen; logger.debug("Repository [{}] writing new index generational blob [{}]", metadata.name(), indexBlob); - writeAtomic(blobContainer(), indexBlob, out -> { + writeAtomic(OperationPurpose.SNAPSHOT_METADATA, blobContainer(), indexBlob, out -> { try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder(org.elasticsearch.core.Streams.noCloseStream(out))) { newRepositoryData.snapshotsToXContent(xContentBuilder, version); } @@ -2750,7 +2750,13 @@ private void maybeWriteIndexLatest(long newGen) { if (supportURLRepo) { logger.debug("Repository [{}] updating index.latest with generation [{}]", metadata.name(), newGen); try { - writeAtomic(blobContainer(), INDEX_LATEST_BLOB, out -> out.write(Numbers.longToBytes(newGen)), false); + writeAtomic( + OperationPurpose.SNAPSHOT_METADATA, + blobContainer(), + INDEX_LATEST_BLOB, + out -> out.write(Numbers.longToBytes(newGen)), + false + ); } catch (Exception e) { logger.warn( () -> 
format( @@ -2777,7 +2783,7 @@ private void maybeWriteIndexLatest(long newGen) { private boolean ensureSafeGenerationExists(long safeGeneration, Consumer onFailure) throws IOException { logger.debug("Ensure generation [{}] that is the basis for this write exists in [{}]", safeGeneration, metadata.name()); if (safeGeneration != RepositoryData.EMPTY_REPO_GEN - && blobContainer().blobExists(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX + safeGeneration) == false) { + && blobContainer().blobExists(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX + safeGeneration) == false) { Tuple previousWriterInfo = null; Exception readRepoDataEx = null; try { @@ -2907,7 +2913,7 @@ long latestIndexBlobId() throws IOException { // package private for testing long readSnapshotIndexLatestBlob() throws IOException { final BytesReference content = Streams.readFully( - Streams.limitStream(blobContainer().readBlob(OperationPurpose.SNAPSHOT, INDEX_LATEST_BLOB), Long.BYTES + 1) + Streams.limitStream(blobContainer().readBlob(OperationPurpose.SNAPSHOT_METADATA, INDEX_LATEST_BLOB), Long.BYTES + 1) ); if (content.length() != Long.BYTES) { throw new RepositoryException( @@ -2922,7 +2928,7 @@ long readSnapshotIndexLatestBlob() throws IOException { } private long listBlobsToGetLatestIndexId() throws IOException { - return latestGeneration(blobContainer().listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet()); + return latestGeneration(blobContainer().listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet()); } private long latestGeneration(Collection rootBlobs) { @@ -2944,13 +2950,14 @@ private long latestGeneration(Collection rootBlobs) { } private void writeAtomic( + OperationPurpose purpose, BlobContainer container, final String blobName, CheckedConsumer writer, boolean failIfAlreadyExists ) throws IOException { logger.trace(() -> format("[%s] Writing [%s] to %s atomically", metadata.name(), blobName, container.path())); - 
container.writeMetadataBlob(OperationPurpose.SNAPSHOT, blobName, failIfAlreadyExists, true, writer); + container.writeMetadataBlob(purpose, blobName, failIfAlreadyExists, true, writer); } @Override @@ -2976,7 +2983,7 @@ private void doSnapshotShard(SnapshotShardContext context) { if (generation == null) { snapshotStatus.ensureNotAborted(); try { - blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(); + blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet(); } catch (IOException e) { throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e); } @@ -3168,7 +3175,7 @@ private void doSnapshotShard(SnapshotShardContext context) { } snapshotStatus.addProcessedFiles(finalFilesInShardMetadataCount, finalFilesInShardMetadataSize); try { - deleteFromContainer(shardContainer, blobsToDelete.iterator()); + deleteFromContainer(OperationPurpose.SNAPSHOT_METADATA, shardContainer, blobsToDelete.iterator()); } catch (IOException e) { logger.warn( () -> format("[%s][%s] failed to delete old index-N blobs during finalization", snapshotId, shardId), @@ -3223,7 +3230,7 @@ private void doSnapshotShard(SnapshotShardContext context) { }, e -> { try { shardContainer.deleteBlobsIgnoringIfNotExists( - OperationPurpose.SNAPSHOT, + OperationPurpose.SNAPSHOT_DATA, Iterators.flatMap(fileToCleanUp.get().iterator(), f -> Iterators.forRange(0, f.numberOfParts(), f::partName)) ); } catch (Exception innerException) { @@ -3388,7 +3395,7 @@ private void restoreFile(BlobStoreIndexShardSnapshot.FileInfo fileInfo, Store st @Override protected InputStream openSlice(int slice) throws IOException { ensureNotClosing(store); - return container.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(slice)); + return container.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(slice)); } })) { final byte[] buffer = new byte[Math.toIntExact(Math.min(bufferSize, fileInfo.length()))]; @@ -3527,7 
+3534,12 @@ public void verify(String seed, DiscoveryNode localNode) { } else { BlobContainer testBlobContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); try { - testBlobContainer.writeBlob(OperationPurpose.SNAPSHOT, "data-" + localNode.getId() + ".dat", new BytesArray(seed), true); + testBlobContainer.writeBlob( + OperationPurpose.SNAPSHOT_METADATA, + "data-" + localNode.getId() + ".dat", + new BytesArray(seed), + true + ); } catch (Exception exp) { throw new RepositoryVerificationException( metadata.name(), @@ -3535,7 +3547,7 @@ public void verify(String seed, DiscoveryNode localNode) { exp ); } - try (InputStream masterDat = testBlobContainer.readBlob(OperationPurpose.SNAPSHOT, "master.dat")) { + try (InputStream masterDat = testBlobContainer.readBlob(OperationPurpose.SNAPSHOT_METADATA, "master.dat")) { final String seedRead = Streams.readFully(masterDat).utf8ToString(); if (seedRead.equals(seed) == false) { throw new RepositoryVerificationException( @@ -3582,6 +3594,7 @@ private void writeShardIndexBlobAtomic( logger.trace(() -> format("[%s] Writing shard index [%s] to [%s]", metadata.name(), indexGeneration, shardContainer.path())); final String blobName = INDEX_SHARD_SNAPSHOTS_FORMAT.blobName(String.valueOf(indexGeneration)); writeAtomic( + OperationPurpose.SNAPSHOT_METADATA, shardContainer, blobName, out -> INDEX_SHARD_SNAPSHOTS_FORMAT.serialize(updatedSnapshots, blobName, compress, serializationParams, out), @@ -3617,7 +3630,7 @@ public BlobStoreIndexShardSnapshots getBlobStoreIndexShardSnapshots(IndexId inde Set blobs = Collections.emptySet(); if (shardGen == null) { - blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(); + blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet(); } return buildBlobStoreIndexShardSnapshots(blobs, shardContainer, shardGen).v1(); @@ -3719,7 +3732,7 @@ private void checkAborted() { final String partName = 
fileInfo.partName(i); logger.trace("[{}] Writing [{}] to [{}]", metadata.name(), partName, shardContainer.path()); final long startMS = threadPool.relativeTimeInMillis(); - shardContainer.writeBlob(OperationPurpose.SNAPSHOT, partName, inputStream, partBytes, false); + shardContainer.writeBlob(OperationPurpose.SNAPSHOT_DATA, partName, inputStream, partBytes, false); logger.trace( "[{}] Writing [{}] of size [{}b] to [{}] took [{}ms]", metadata.name(), diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index 54cb6fe7c45d3..ca3ff799436c2 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -118,7 +118,7 @@ public ChecksumBlobStoreFormat( public T read(String repoName, BlobContainer blobContainer, String name, NamedXContentRegistry namedXContentRegistry) throws IOException { String blobName = blobName(name); - try (InputStream in = blobContainer.readBlob(OperationPurpose.SNAPSHOT, blobName)) { + try (InputStream in = blobContainer.readBlob(OperationPurpose.SNAPSHOT_METADATA, blobName)) { return deserialize(repoName, namedXContentRegistry, in); } } @@ -345,7 +345,7 @@ public void write(T obj, BlobContainer blobContainer, String name, boolean compr throws IOException { final String blobName = blobName(name); blobContainer.writeMetadataBlob( - OperationPurpose.SNAPSHOT, + OperationPurpose.SNAPSHOT_METADATA, blobName, false, false, diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java index 1f54046630cf8..67712af9ef57b 100644 --- a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java +++ 
b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java @@ -46,6 +46,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -228,14 +229,19 @@ private static void checkAtomicWrite() throws IOException { BlobPath.EMPTY, path ); - container.writeBlobAtomic(randomPurpose(), blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true); + container.writeBlobAtomic( + randomNonDataPurpose(), + blobName, + new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), + true + ); final var blobData = new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))); - container.writeBlobAtomic(randomPurpose(), blobName, blobData, false); + container.writeBlobAtomic(randomNonDataPurpose(), blobName, blobData, false); assertEquals(blobData, Streams.readFully(container.readBlob(randomPurpose(), blobName))); expectThrows( FileAlreadyExistsException.class, () -> container.writeBlobAtomic( - randomPurpose(), + randomNonDataPurpose(), blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index ef625706ffffe..adfc333e9dc7e 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.UUIDs; +import 
org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -67,7 +68,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.repositories.RepositoryDataTests.generateRandomRepoData; -import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -204,7 +204,7 @@ public void testCorruptIndexLatestFile() throws Exception { for (int i = 0; i < 16; i++) { repository.blobContainer() - .writeBlob(randomPurpose(), BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); + .writeBlob(OperationPurpose.SNAPSHOT_METADATA, BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); if (i == 8) { assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(generation)); } else { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 5b59040bbb04d..19f0d1e2e88a0 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -273,7 +273,11 @@ public void verifyReposThenStopServices() { (BlobStoreRepository) testClusterNodes.randomMasterNodeSafe().repositoriesService.repository("repo") ); deterministicTaskQueue.runAllRunnableTasks(); - assertNull(future.result()); + assertTrue(future.isDone()); + final var result = future.result(); + if (result != null) { + fail(result); + } } finally { testClusterNodes.nodes.values().forEach(TestClusterNodes.TestClusterNode::stop); } diff --git 
a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java index 15f33131fa114..3d4dea430a9b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java @@ -36,6 +36,7 @@ import java.util.Set; import java.util.concurrent.Executor; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.contains; @@ -275,7 +276,7 @@ private static void createDanglingIndex(final BlobStoreRepository repo, final Ex .writeBlob(randomPurpose(), "bar", new ByteArrayInputStream(new byte[3]), 3, false); for (String prefix : Arrays.asList("snap-", "meta-")) { blobStore.blobContainer(repo.basePath()) - .writeBlob(randomPurpose(), prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); + .writeBlob(randomNonDataPurpose(), prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); } })); future.get(); @@ -285,8 +286,8 @@ private static void createDanglingIndex(final BlobStoreRepository repo, final Ex final BlobStore blobStore = repo.blobStore(); return blobStore.blobContainer(repo.basePath().add("indices")).children(randomPurpose()).containsKey("foo") && blobStore.blobContainer(repo.basePath().add("indices").add("foo")).blobExists(randomPurpose(), "bar") - && blobStore.blobContainer(repo.basePath()).blobExists(randomPurpose(), "meta-foo.dat") - && blobStore.blobContainer(repo.basePath()).blobExists(randomPurpose(), "snap-foo.dat"); + && blobStore.blobContainer(repo.basePath()).blobExists(randomNonDataPurpose(), 
"meta-foo.dat") + && blobStore.blobContainer(repo.basePath()).blobExists(randomNonDataPurpose(), "snap-foo.dat"); })); assertTrue(corruptionFuture.get()); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index 383c2b3c2d13b..79e4a8da713c5 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -67,6 +67,7 @@ import static org.apache.lucene.tests.util.LuceneTestCase.random; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomValueOtherThan; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasKey; @@ -105,7 +106,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore try { final BlobContainer blobContainer = repository.blobContainer(); final long latestGen; - try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob(randomPurpose(), "index.latest"))) { + try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob(randomNonDataPurpose(), "index.latest"))) { latestGen = inputStream.readLong(); } catch (NoSuchFileException e) { throw new AssertionError("Could not find index.latest blob for repo [" + repository + "]"); @@ -113,7 +114,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore assertIndexGenerations(blobContainer, latestGen); final RepositoryData repositoryData; try ( - InputStream blob = blobContainer.readBlob(randomPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + latestGen); + InputStream blob = blobContainer.readBlob(randomNonDataPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX 
+ latestGen); XContentParser parser = XContentType.JSON.xContent() .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), blob) ) { @@ -462,4 +463,8 @@ private static ClusterService mockClusterService(ClusterState initialState) { public static OperationPurpose randomPurpose() { return randomFrom(OperationPurpose.values()); } + + public static OperationPurpose randomNonDataPurpose() { + return randomValueOtherThan(OperationPurpose.SNAPSHOT_DATA, BlobStoreTestUtil::randomPurpose); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 578a7898bcd1e..a2499c06d6ccc 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -62,6 +63,7 @@ import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.READONLY_SETTING_KEY; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_INDEX_NAME_FORMAT; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_NAME_FORMAT; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -228,7 +230,7 @@ public static void writeBlob( if (randomBoolean()) { container.writeBlob(randomPurpose(), blobName, bytesArray, failIfAlreadyExists); } else { - container.writeBlobAtomic(randomPurpose(), blobName, bytesArray, failIfAlreadyExists); + container.writeBlobAtomic(randomNonDataPurpose(), blobName, bytesArray, failIfAlreadyExists); } } @@ -556,7 +558,7 @@ public void testDanglingShardLevelBlobCleanup() throws Exception { // Create an extra dangling blob as if from an earlier snapshot that failed to clean up shardContainer.writeBlob( - randomPurpose(), + OperationPurpose.SNAPSHOT_DATA, BlobStoreRepository.UPLOADED_DATA_BLOB_PREFIX + UUIDs.randomBase64UUID(random()), BytesArray.EMPTY, true diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java index aab3e83a4f496..ea85a91677c46 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java @@ -341,7 +341,7 @@ public String toString() { private InputStream openBlobStream(int part, long pos, long length) throws IOException { assert MetadataCachingIndexInput.assertCurrentThreadMayAccessBlobStore(); stats.addBlobStoreBytesRequested(length); - return blobContainer.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(part), pos, length); + return blobContainer.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(part), pos, length); } private static class StreamForSequentialReads 
implements Closeable { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index 2b61dc18e266c..e9f4ab11c9b7c 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -528,7 +528,7 @@ protected InputStream openInputStreamFromBlobStore(final long position, final lo assert position + readLength <= fileInfo.length() : "cannot read [" + position + "-" + (position + readLength) + "] from [" + fileInfo + "]"; stats.addBlobStoreBytesRequested(readLength); - return directory.blobContainer().readBlob(OperationPurpose.SNAPSHOT, fileInfo.name(), position, readLength); + return directory.blobContainer().readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.name(), position, readLength); } return openInputStreamMultipleParts(position, readLength); } @@ -558,7 +558,7 @@ protected InputStream openSlice(int slice) throws IOException { ? getRelativePositionInPart(position + readLength - 1) + 1 : fileInfo.partBytes(currentPart); return directory.blobContainer() - .readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(currentPart), startInPart, endInPart - startInPart); + .readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(currentPart), startInPart, endInPart - startInPart); } }; } From d3fefde0a33031a4a35c33df8f5f9f3beb04cd8c Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Mon, 4 Dec 2023 13:58:23 +0100 Subject: [PATCH 114/181] ESQL: add unit tests for conversion functions. Extend `TEXT` type support for them (#102746) This adds the missing unit tests for the conversion functions. 
It also extends the type support by adding the `TEXT` type to those functions that support `KEYWORD` already (which also simplifies the testing, actually). Some functions did have it, some didn't; they now all do. The change also fixes two defects resulting from better testing coverage: `ToInteger` and `ToUnsignedLong` had some missing necessary exceptions declarations in the decorators for the evaluators. It also updates `ToInteger`'s `fromDouble()` conversion to use a newly added utility, so that the failed conversions contain the right message (`out of [integer] range`, instead of the confusing `out of [long] range`). Related: #102488, #102552. --- .../esql/functions/signature/to_boolean.svg | 1 + .../esql/functions/signature/to_datetime.svg | 1 + .../esql/functions/signature/to_degrees.svg | 1 + .../esql/functions/signature/to_double.svg | 1 + .../esql/functions/signature/to_integer.svg | 1 + .../esql/functions/signature/to_ip.svg | 2 +- .../esql/functions/signature/to_long.svg | 1 + .../esql/functions/signature/to_radians.svg | 1 + .../functions/signature/to_unsigned_long.svg | 1 + .../esql/functions/types/mv_count.asciidoc | 2 + .../esql/functions/types/to_boolean.asciidoc | 11 + .../esql/functions/types/to_datetime.asciidoc | 11 + .../esql/functions/types/to_degrees.asciidoc | 8 + .../esql/functions/types/to_double.asciidoc | 12 + .../esql/functions/types/to_integer.asciidoc | 12 + .../esql/functions/types/to_ip.asciidoc | 3 +- .../esql/functions/types/to_long.asciidoc | 14 + .../esql/functions/types/to_radians.asciidoc | 8 + .../esql/functions/types/to_string.asciidoc | 2 + .../functions/types/to_unsigned_long.asciidoc | 12 + .../xpack/esql/CsvTestUtils.java | 3 +- .../src/main/resources/ints.csv-spec | 28 +- .../src/main/resources/show.csv-spec | 183 ++++++------ .../convert/ToIntegerFromDoubleEvaluator.java | 7 +- .../convert/ToIntegerFromLongEvaluator.java | 7 +- .../convert/ToIntegerFromStringEvaluator.java | 7 +- 
.../ToIntegerFromUnsignedLongEvaluator.java | 7 +- .../ToUnsignedLongFromIntEvaluator.java | 31 +- .../ToUnsignedLongFromLongEvaluator.java | 31 +- .../ToUnsignedLongFromStringEvaluator.java | 7 +- .../convert/AbstractConvertFunction.java | 43 ++- .../function/scalar/convert/ToBoolean.java | 10 +- .../function/scalar/convert/ToDatetime.java | 10 +- .../function/scalar/convert/ToDegrees.java | 5 +- .../function/scalar/convert/ToDouble.java | 10 +- .../function/scalar/convert/ToIP.java | 9 +- .../function/scalar/convert/ToInteger.java | 32 +- .../function/scalar/convert/ToLong.java | 13 +- .../function/scalar/convert/ToRadians.java | 5 +- .../function/scalar/convert/ToString.java | 2 + .../scalar/convert/ToUnsignedLong.java | 16 +- .../function/scalar/convert/ToVersion.java | 2 + .../xpack/esql/analysis/AnalyzerTests.java | 3 +- .../function/AbstractFunctionTestCase.java | 63 +++- .../expression/function/TestCaseSupplier.java | 158 ++++++---- .../scalar/convert/ToBooleanTests.java | 90 ++++++ .../scalar/convert/ToDatetimeTests.java | 152 ++++++++++ .../scalar/convert/ToDegreesTests.java | 80 +++++ .../scalar/convert/ToDoubleTests.java | 122 ++++++++ .../function/scalar/convert/ToIPTests.java | 48 ++- .../scalar/convert/ToIntegerTests.java | 277 ++++++++++++++++++ .../function/scalar/convert/ToLongTests.java | 217 ++++++++++++++ .../scalar/convert/ToRadiansTests.java | 80 +++++ .../scalar/convert/ToUnsignedLongTests.java | 258 ++++++++++++++++ .../scalar/convert/ToVersionTests.java | 33 +-- .../xpack/ql/type/DataTypeConverter.java | 8 + 56 files changed, 1892 insertions(+), 270 deletions(-) create mode 100644 docs/reference/esql/functions/signature/to_boolean.svg create mode 100644 docs/reference/esql/functions/signature/to_datetime.svg create mode 100644 docs/reference/esql/functions/signature/to_degrees.svg create mode 100644 docs/reference/esql/functions/signature/to_double.svg create mode 100644 docs/reference/esql/functions/signature/to_integer.svg create mode 
100644 docs/reference/esql/functions/signature/to_long.svg create mode 100644 docs/reference/esql/functions/signature/to_radians.svg create mode 100644 docs/reference/esql/functions/signature/to_unsigned_long.svg create mode 100644 docs/reference/esql/functions/types/to_boolean.asciidoc create mode 100644 docs/reference/esql/functions/types/to_datetime.asciidoc create mode 100644 docs/reference/esql/functions/types/to_degrees.asciidoc create mode 100644 docs/reference/esql/functions/types/to_double.asciidoc create mode 100644 docs/reference/esql/functions/types/to_integer.asciidoc create mode 100644 docs/reference/esql/functions/types/to_long.asciidoc create mode 100644 docs/reference/esql/functions/types/to_radians.asciidoc create mode 100644 docs/reference/esql/functions/types/to_unsigned_long.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java diff --git a/docs/reference/esql/functions/signature/to_boolean.svg 
b/docs/reference/esql/functions/signature/to_boolean.svg new file mode 100644 index 0000000000000..43c2aac2bca53 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_boolean.svg @@ -0,0 +1 @@ +TO_BOOLEAN(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_datetime.svg b/docs/reference/esql/functions/signature/to_datetime.svg new file mode 100644 index 0000000000000..eb9e74248471a --- /dev/null +++ b/docs/reference/esql/functions/signature/to_datetime.svg @@ -0,0 +1 @@ +TO_DATETIME(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_degrees.svg b/docs/reference/esql/functions/signature/to_degrees.svg new file mode 100644 index 0000000000000..01fe0a4770156 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_degrees.svg @@ -0,0 +1 @@ +TO_DEGREES(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_double.svg b/docs/reference/esql/functions/signature/to_double.svg new file mode 100644 index 0000000000000..e785e30ce5f81 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_double.svg @@ -0,0 +1 @@ +TO_DOUBLE(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_integer.svg b/docs/reference/esql/functions/signature/to_integer.svg new file mode 100644 index 0000000000000..beb2e94039e53 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_integer.svg @@ -0,0 +1 @@ +TO_INTEGER(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_ip.svg b/docs/reference/esql/functions/signature/to_ip.svg index c049964b254f3..c1669c9376c8b 100644 --- a/docs/reference/esql/functions/signature/to_ip.svg +++ b/docs/reference/esql/functions/signature/to_ip.svg @@ -1 +1 @@ -TO_IP(arg1) \ No newline at end of file +TO_IP(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_long.svg b/docs/reference/esql/functions/signature/to_long.svg new file mode 100644 
index 0000000000000..464d4a001cb35 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_long.svg @@ -0,0 +1 @@ +TO_LONG(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_radians.svg b/docs/reference/esql/functions/signature/to_radians.svg new file mode 100644 index 0000000000000..712431fb32497 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_radians.svg @@ -0,0 +1 @@ +TO_RADIANS(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_unsigned_long.svg b/docs/reference/esql/functions/signature/to_unsigned_long.svg new file mode 100644 index 0000000000000..da07b3a4c7349 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_unsigned_long.svg @@ -0,0 +1 @@ +TO_UNSIGNED_LONG(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index 21794bcb1b959..440e66d11096e 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -2,8 +2,10 @@ |=== v | result boolean | integer +cartesian_point | integer datetime | integer double | integer +geo_point | integer integer | integer ip | integer keyword | integer diff --git a/docs/reference/esql/functions/types/to_boolean.asciidoc b/docs/reference/esql/functions/types/to_boolean.asciidoc new file mode 100644 index 0000000000000..7f543963eb090 --- /dev/null +++ b/docs/reference/esql/functions/types/to_boolean.asciidoc @@ -0,0 +1,11 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | boolean +double | boolean +integer | boolean +keyword | boolean +long | boolean +text | boolean +unsigned_long | boolean +|=== diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc new file mode 100644 index 0000000000000..bbd755f81f4da --- /dev/null +++ 
b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -0,0 +1,11 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +datetime | datetime +double | datetime +integer | datetime +keyword | datetime +long | datetime +text | datetime +unsigned_long | datetime +|=== diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/to_degrees.asciidoc new file mode 100644 index 0000000000000..7cb7ca46022c2 --- /dev/null +++ b/docs/reference/esql/functions/types/to_degrees.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +double | double +integer | double +long | double +unsigned_long | double +|=== diff --git a/docs/reference/esql/functions/types/to_double.asciidoc b/docs/reference/esql/functions/types/to_double.asciidoc new file mode 100644 index 0000000000000..38e8482b77544 --- /dev/null +++ b/docs/reference/esql/functions/types/to_double.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | double +datetime | double +double | double +integer | double +keyword | double +long | double +text | double +unsigned_long | double +|=== diff --git a/docs/reference/esql/functions/types/to_integer.asciidoc b/docs/reference/esql/functions/types/to_integer.asciidoc new file mode 100644 index 0000000000000..bcea15b9ec80b --- /dev/null +++ b/docs/reference/esql/functions/types/to_integer.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | integer +datetime | integer +double | integer +integer | integer +keyword | integer +long | integer +text | integer +unsigned_long | integer +|=== diff --git a/docs/reference/esql/functions/types/to_ip.asciidoc b/docs/reference/esql/functions/types/to_ip.asciidoc index a21bbf14d87ca..6d7f9338a9aeb 100644 --- a/docs/reference/esql/functions/types/to_ip.asciidoc +++ b/docs/reference/esql/functions/types/to_ip.asciidoc @@ -1,6 +1,7 
@@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result ip | ip keyword | ip +text | ip |=== diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc new file mode 100644 index 0000000000000..5c063739fc5b1 --- /dev/null +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -0,0 +1,14 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | long +cartesian_point | long +datetime | long +double | long +geo_point | long +integer | long +keyword | long +long | long +text | long +unsigned_long | long +|=== diff --git a/docs/reference/esql/functions/types/to_radians.asciidoc b/docs/reference/esql/functions/types/to_radians.asciidoc new file mode 100644 index 0000000000000..7cb7ca46022c2 --- /dev/null +++ b/docs/reference/esql/functions/types/to_radians.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +double | double +integer | double +long | double +unsigned_long | double +|=== diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index b8fcd4477aa70..4de4af735b07f 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -2,8 +2,10 @@ |=== v | result boolean | keyword +cartesian_point | keyword datetime | keyword double | keyword +geo_point | keyword integer | keyword ip | keyword keyword | keyword diff --git a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..76d9cf44f4dd2 --- /dev/null +++ b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | unsigned_long +datetime | unsigned_long +double | unsigned_long +integer | unsigned_long 
+keyword | unsigned_long +long | unsigned_long +text | unsigned_long +unsigned_long | unsigned_long +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 060a137b69b7c..ebe27225becb1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -327,8 +327,7 @@ public static ExpectedResults loadCsvSpecValues(String csv) { for (int i = 0; i < row.size(); i++) { String value = row.get(i); if (value == null || value.trim().equalsIgnoreCase(NULL_VALUE)) { - value = null; - rowValues.add(columnTypes.get(i).convert(value)); + rowValues.add(null); continue; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 0f6fc42860750..3e28c8bc2cb9b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -197,8 +197,7 @@ long:long |int:integer convertULToInt#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warning:Line 1:57: evaluation of [to_int(ul)] failed, treating result as null. Only first 20 failures recorded. -// UL conversion to int dips into long; not the most efficient, but it's how SQL does it too. 
-warning:Line 1:57: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:57: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [integer] range ul:ul |int:integer [2147483647, 9223372036854775808]|2147483647 @@ -219,20 +218,29 @@ tf:boolean |t2i:integer |f2i:integer |tf2i:integer ; convertStringToInt -row int_str = "2147483647", int_dbl_str = "2147483647.2" | eval is2i = to_integer(int_str), ids2i = to_integer(int_dbl_str), overflow = to_integer("2147483648"), no_number = to_integer("foo"); -warning:Line 1:137: evaluation of [to_integer(\"2147483648\")] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:137: java.lang.NumberFormatException: For input string: \"2147483648\" -warning:Line 1:175: evaluation of [to_integer(\"foo\")] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:175: java.lang.NumberFormatException: For input string: \"foo\" +row int_str = "2147483647", int_dbl_str = "2147483646.2" | eval is2i = to_integer(int_str), ids2i = to_integer(int_dbl_str); -int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer |overflow:integer |no_number:integer -2147483647 |2147483647.2 |2147483647 |2147483647 |null |null +int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer +2147483647 |2147483646.2 |2147483647 |2147483646 +; + +convertStringToIntFail#[skip:-8.11.99, reason:double rounding in conversion updated in 8.12] +row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); +warning:Line 1:79: evaluation of [to_integer(str1)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:79: java.lang.NumberFormatException: For input string: \"2147483647.2\" +warning:Line 1:102: evaluation of [to_integer(str2)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:102: java.lang.NumberFormatException: For input string: \"2147483648\" +warning:Line 1:126: evaluation of [to_integer(non)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:126: java.lang.NumberFormatException: For input string: \"no number\" + +str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer |noi:integer +2147483647.2 |2147483648 |no number |null |null |null ; convertDoubleToInt#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warning:Line 1:54: evaluation of [to_integer(1e19)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:54: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range +warning:Line 1:54: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [integer] range d:double |d2i:integer |overflow:integer 123.4 |123 |null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index b23e4d87fe52f..ffad468790998 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -5,6 +5,7 @@ v:long 1 ; +# TODO: switch this test to ``&format=csv&delimiter=|` output showFunctions#[skip:-8.11.99] show functions; @@ -71,27 +72,27 @@ sum |? sum(arg1:?) tan |"double tan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false tanh |"double tanh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false tau |? tau() | null | null | null |? | "" | null | false -to_bool |? to_bool(arg1:?) |arg1 |? | "" |? | "" | false | false -to_boolean |? to_boolean(arg1:?) |arg1 |? | "" |? 
| "" | false | false +to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false +to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false to_cartesianpoint |? to_cartesianpoint(arg1:?) |arg1 |? | "" |? | "" | false | false -to_datetime |? to_datetime(arg1:?) |arg1 |? | "" |? | "" | false | false -to_dbl |? to_dbl(arg1:?) |arg1 |? | "" |? | "" | false | false -to_degrees |? to_degrees(arg1:?) |arg1 |? | "" |? | "" | false | false -to_double |? to_double(arg1:?) |arg1 |? | "" |? | "" | false | false -to_dt |? to_dt(arg1:?) |arg1 |? | "" |? | "" | false | false +to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false +to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false +to_degrees |"double to_degrees(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false +to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false +to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false to_geopoint |? to_geopoint(arg1:?) |arg1 |? | "" |? | "" | false | false -to_int |? to_int(arg1:?) |arg1 |? | "" |? | "" | false | false -to_integer |? to_integer(arg1:?) |arg1 |? | "" |? | "" | false | false -to_ip |? to_ip(arg1:?) |arg1 |? | "" |? | "" | false | false -to_long |? to_long(arg1:?) |arg1 |? | "" |? | "" | false | false -to_radians |? 
to_radians(arg1:?) |arg1 |? | "" |? | "" | false | false -to_str |"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "" | false | false -to_string |"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "" | false | false -to_ul |? to_ul(arg1:?) |arg1 |? | "" |? | "" | false | false -to_ulong |? to_ulong(arg1:?) |arg1 |? | "" |? | "" | false | false -to_unsigned_long |? to_unsigned_long(arg1:?) |arg1 |? | "" |? | "" | false | false -to_ver |"? to_ver(v:keyword|text|version)" |v |"keyword|text|version"| "" |? | "" | false | false -to_version |"? to_version(v:keyword|text|version)" |v |"keyword|text|version"| "" |? | "" | false | false +to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false +to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false +to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | |false |false +to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point" | |long | |false |false +to_radians |"double to_radians(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false +to_str |"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v 
|"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false +to_string |"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false +to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false +to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false +to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false +to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | |false |false +to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | |false |false trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading and trailing whitespaces from a string.| false | false ; @@ -99,90 +100,90 @@ trim |"keyword|text trim(str:keyword|text)" showFunctionsSynopsis#[skip:-8.11.99] show functions | keep synopsis; -synopsis:keyword -"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" -"double acos(n:integer|long|double|unsigned_long)" -"double asin(n:integer|long|double|unsigned_long)" -"double atan(n:integer|long|double|unsigned_long)" -"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" +synopsis:keyword +"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" +"double acos(n:integer|long|double|unsigned_long)" 
+"double asin(n:integer|long|double|unsigned_long)" +"double atan(n:integer|long|double|unsigned_long)" +"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" "double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" -? avg(arg1:?) -? case(arg1:?, arg2...:?) -"? ceil(n:integer|long|double|unsigned_long)" -? cidr_match(arg1:?, arg2...:?) -? coalesce(arg1:?, arg2...:?) -? concat(arg1:?, arg2...:?) -"double cos(n:integer|long|double|unsigned_long)" -"double cosh(n:integer|long|double|unsigned_long)" -? count(arg1:?) -? count_distinct(arg1:?, arg2:?) -? date_extract(arg1:?, arg2:?) -? date_format(arg1:?, arg2:?) +? avg(arg1:?) +? case(arg1:?, arg2...:?) +"? ceil(n:integer|long|double|unsigned_long)" +? cidr_match(arg1:?, arg2...:?) +? coalesce(arg1:?, arg2...:?) +? concat(arg1:?, arg2...:?) +"double cos(n:integer|long|double|unsigned_long)" +"double cosh(n:integer|long|double|unsigned_long)" +? count(arg1:?) +? count_distinct(arg1:?, arg2:?) +? date_extract(arg1:?, arg2:?) +? date_format(arg1:?, arg2:?) "date date_parse(?datePattern:keyword, dateString:keyword|text)" -? date_trunc(arg1:?, arg2:?) -? e() -? ends_with(arg1:?, arg2:?) -"? floor(n:integer|long|double|unsigned_long)" -"? greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -? is_finite(arg1:?) -? is_infinite(arg1:?) -? is_nan(arg1:?) -"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"? left(string:keyword, length:integer)" -? length(arg1:?) -"? log10(n:integer|long|double|unsigned_long)" +? date_trunc(arg1:?, arg2:?) +? e() +? ends_with(arg1:?, arg2:?) +"? floor(n:integer|long|double|unsigned_long)" +"? greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +? 
is_finite(arg1:?) +? is_infinite(arg1:?) +? is_nan(arg1:?) +"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +? left(string:keyword, length:integer) +? length(arg1:?) +"? log10(n:integer|long|double|unsigned_long)" "keyword|text ltrim(str:keyword|text)" -? max(arg1:?) -? median(arg1:?) -? median_absolute_deviation(arg1:?) -? min(arg1:?) -? mv_avg(arg1:?) +? max(arg1:?) +? median(arg1:?) +? median_absolute_deviation(arg1:?) +? min(arg1:?) +? mv_avg(arg1:?) "keyword mv_concat(v:text|keyword, delim:text|keyword)" "integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" "? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" "? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" -? mv_median(arg1:?) +? mv_median(arg1:?) "? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" -? mv_sum(arg1:?) -? now() -? percentile(arg1:?, arg2:?) -? pi() -"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)" -"? replace(arg1:?, arg2:?, arg3:?)" -"? right(string:keyword, length:integer)" -? round(arg1:?, arg2:?) +? mv_sum(arg1:?) +? now() +? percentile(arg1:?, arg2:?) +? pi() +"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)" +? replace(arg1:?, arg2:?, arg3:?) +? right(string:keyword, length:integer) +? round(arg1:?, arg2:?) "keyword|text rtrim(str:keyword|text)" -"double sin(n:integer|long|double|unsigned_long)" +"double sin(n:integer|long|double|unsigned_long)" "double sinh(n:integer|long|double|unsigned_long)" -? split(arg1:?, arg2:?) -"? sqrt(n:integer|long|double|unsigned_long)" -? starts_with(arg1:?, arg2:?) -? substring(arg1:?, arg2:?, arg3:?) -? sum(arg1:?) -"double tan(n:integer|long|double|unsigned_long)" -"double tanh(n:integer|long|double|unsigned_long)" -? tau() -? 
to_bool(arg1:?) -? to_boolean(arg1:?) +? split(arg1:?, arg2:?) +"? sqrt(n:integer|long|double|unsigned_long)" +? starts_with(arg1:?, arg2:?) +? substring(arg1:?, arg2:?, arg3:?) +? sum(arg1:?) +"double tan(n:integer|long|double|unsigned_long)" +"double tanh(n:integer|long|double|unsigned_long)" +? tau() +"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" +"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" ? to_cartesianpoint(arg1:?) -? to_datetime(arg1:?) -? to_dbl(arg1:?) -? to_degrees(arg1:?) -? to_double(arg1:?) -? to_dt(arg1:?) +"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" +"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"double to_degrees(v:double|long|unsigned_long|integer)" +"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" ? to_geopoint(arg1:?) -? to_int(arg1:?) -? to_integer(arg1:?) -? to_ip(arg1:?) -? to_long(arg1:?) -? to_radians(arg1:?) -"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -? to_ul(arg1:?) -? to_ulong(arg1:?) -? to_unsigned_long(arg1:?) -"? to_ver(v:keyword|text|version)" -"? 
to_version(v:keyword|text|version)" +"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"ip to_ip(v:ip|keyword|text)" +"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" +"double to_radians(v:double|long|unsigned_long|integer)" +"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" +"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" +"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"version to_ver(v:keyword|text|version)" +"version to_version(v:keyword|text|version)" "keyword|text trim(str:keyword|text)" ; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java index b7ff410d07c15..329269bafd9ba 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public 
Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java index 742b057c06799..f9b3cb60dad2c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return 
driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java index bff4d46b09dff..600fa293394f9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +41,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } 
catch (InvalidArgumentException | NumberFormatException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +50,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); builder.appendNull(); } @@ -84,7 +85,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java index ccd1edc4aa6c2..34128e44f1500 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return 
driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java index d3ccf82f2cb05..703f0729654a8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -35,11 +36,21 @@ public Block evalVector(Vector v) { IntVector vector = (IntVector) v; int positionCount = v.getPositionCount(); if (vector.isConstant()) { - return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + } catch (InvalidArgumentException e) { + registerException(e); + return 
driverContext.blockFactory().newConstantNullBlock(positionCount); + } } try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - builder.appendLong(evalValue(vector, p)); + try { + builder.appendLong(evalValue(vector, p)); + } catch (InvalidArgumentException e) { + registerException(e); + builder.appendNull(); + } } return builder.build(); } @@ -62,13 +73,17 @@ public Block evalBlock(Block b) { boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { - long value = evalValue(block, i); - if (positionOpened == false && valueCount > 1) { - builder.beginPositionEntry(); - positionOpened = true; + try { + long value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (InvalidArgumentException e) { + registerException(e); } - builder.appendLong(value); - valuesAppended = true; } if (valuesAppended == false) { builder.appendNull(); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java index 2f01aef20edde..b43b961f5d34a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; 
import org.elasticsearch.xpack.ql.tree.Source; /** @@ -34,11 +35,21 @@ public Block evalVector(Vector v) { LongVector vector = (LongVector) v; int positionCount = v.getPositionCount(); if (vector.isConstant()) { - return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + } catch (InvalidArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } } try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - builder.appendLong(evalValue(vector, p)); + try { + builder.appendLong(evalValue(vector, p)); + } catch (InvalidArgumentException e) { + registerException(e); + builder.appendNull(); + } } return builder.build(); } @@ -61,13 +72,17 @@ public Block evalBlock(Block b) { boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { - long value = evalValue(block, i); - if (positionOpened == false && valueCount > 1) { - builder.beginPositionEntry(); - positionOpened = true; + try { + long value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (InvalidArgumentException e) { + registerException(e); } - builder.appendLong(value); - valuesAppended = true; } if (valuesAppended == false) { builder.appendNull(); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java index 4552154560421..5b46fe2bfc9bf 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +41,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +50,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendLong(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); builder.appendNull(); } @@ -84,7 +85,7 @@ public Block evalBlock(Block b) { } builder.appendLong(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 0da3717f758bf..1772916ba801c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import joptsimple.internal.Strings; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.elasticsearch.compute.data.Block; @@ -20,12 +22,18 @@ import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.function.Function; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -35,6 +43,15 @@ */ public abstract class AbstractConvertFunction extends UnaryScalarFunction implements EvaluatorMapper { + // the numeric types convert functions need to handle; the other numeric types are converted upstream to one of these + private static final List NUMERIC_TYPES = List.of( + DataTypes.INTEGER, + DataTypes.LONG, + DataTypes.UNSIGNED_LONG, + DataTypes.DOUBLE + ); + public static final List STRING_TYPES = DataTypes.types().stream().filter(EsqlDataTypes::isString).toList(); + protected AbstractConvertFunction(Source source, Expression field) { super(source, field); } @@ -56,13 +73,25 @@ protected final TypeResolution resolveType() { if (childrenResolved() == false) { return new TypeResolution("Unresolved children"); } - return isType( - field(), - factories()::containsKey, - sourceText(), - null, - 
factories().keySet().stream().map(dt -> dt.name().toLowerCase(Locale.ROOT)).sorted().toArray(String[]::new) - ); + return isType(field(), factories()::containsKey, sourceText(), null, supportedTypesNames(factories().keySet())); + } + + public static String supportedTypesNames(Set types) { + List supportedTypesNames = new ArrayList<>(types.size()); + HashSet supportTypes = new HashSet<>(types); + if (supportTypes.containsAll(NUMERIC_TYPES)) { + supportedTypesNames.add("numeric"); + NUMERIC_TYPES.forEach(supportTypes::remove); + } + + if (types.containsAll(STRING_TYPES)) { + supportedTypesNames.add("string"); + STRING_TYPES.forEach(supportTypes::remove); + } + + supportTypes.forEach(t -> supportedTypesNames.add(t.name().toLowerCase(Locale.ROOT))); + supportedTypesNames.sort(String::compareTo); + return Strings.join(supportedTypesNames, " or "); } @FunctionalInterface diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 442c106042fa0..3a33e086d8fdd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,6 +25,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static 
org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; @@ -31,13 +34,18 @@ public class ToBoolean extends AbstractConvertFunction { private static final Map EVALUATORS = Map.ofEntries( Map.entry(BOOLEAN, (field, source) -> field), Map.entry(KEYWORD, ToBooleanFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToBooleanFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToBooleanFromDoubleEvaluator.Factory::new), Map.entry(LONG, ToBooleanFromLongEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToBooleanFromUnsignedLongEvaluator.Factory::new), Map.entry(INTEGER, ToBooleanFromIntEvaluator.Factory::new) ); - public ToBoolean(Source source, Expression field) { + @FunctionInfo(returnType = "boolean") + public ToBoolean( + Source source, + @Param(name = "v", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index 9910447708b44..c2f621433ca21 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import 
org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -23,6 +25,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; public class ToDatetime extends AbstractConvertFunction { @@ -31,12 +34,17 @@ public class ToDatetime extends AbstractConvertFunction { Map.entry(DATETIME, (field, source) -> field), Map.entry(LONG, (field, source) -> field), Map.entry(KEYWORD, ToDatetimeFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToDatetimeFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new), Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd ); - public ToDatetime(Source source, Expression field) { + @FunctionInfo(returnType = "date") + public ToDatetime( + Source source, + @Param(name = "v", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index 6b0d638e875a0..44f8507d880d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -37,7 +39,8 @@ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMappe ) ); - public ToDegrees(Source source, Expression field) { + @FunctionInfo(returnType = "double") + public ToDegrees(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", "integer" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index e83a0eae8d7a8..7711f55d667ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,6 +25,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; @@ -33,12 +36,17 @@ public 
class ToDouble extends AbstractConvertFunction { Map.entry(BOOLEAN, ToDoubleFromBooleanEvaluator.Factory::new), Map.entry(DATETIME, ToDoubleFromLongEvaluator.Factory::new), // CastLongToDoubleEvaluator would be a candidate, but not MV'd Map.entry(KEYWORD, ToDoubleFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToDoubleFromStringEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToDoubleFromUnsignedLongEvaluator.Factory::new), Map.entry(LONG, ToDoubleFromLongEvaluator.Factory::new), // CastLongToDoubleEvaluator would be a candidate, but not MV'd Map.entry(INTEGER, ToDoubleFromIntEvaluator.Factory::new) // CastIntToDoubleEvaluator would be a candidate, but not MV'd ); - public ToDouble(Source source, Expression field) { + @FunctionInfo(returnType = "double") + public ToDouble( + Source source, + @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index 4829d39b09d65..97512a03fe2ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,16 +21,19 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static 
org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; public class ToIP extends AbstractConvertFunction { private static final Map EVALUATORS = Map.ofEntries( Map.entry(IP, (field, source) -> field), - Map.entry(KEYWORD, ToIPFromStringEvaluator.Factory::new) + Map.entry(KEYWORD, ToIPFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToIPFromStringEvaluator.Factory::new) ); - public ToIP(Source source, Expression field) { + @FunctionInfo(returnType = "ip") + public ToIP(Source source, @Param(name = "v", type = { "ip", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 480962ca27f86..a8e4ef804a2ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -9,8 +9,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,7 +20,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; import static 
org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -27,7 +27,9 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToInteger extends AbstractConvertFunction { @@ -36,12 +38,17 @@ public class ToInteger extends AbstractConvertFunction { Map.entry(BOOLEAN, ToIntegerFromBooleanEvaluator.Factory::new), Map.entry(DATETIME, ToIntegerFromLongEvaluator.Factory::new), Map.entry(KEYWORD, ToIntegerFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToIntegerFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToIntegerFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToIntegerFromUnsignedLongEvaluator.Factory::new), Map.entry(LONG, ToIntegerFromLongEvaluator.Factory::new) ); - public ToInteger(Source source, Expression field) { + @FunctionInfo(returnType = "integer") + public ToInteger( + Source source, + @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } @@ -70,7 +77,7 @@ static int fromBoolean(boolean bool) { return bool ? 
1 : 0; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) static int fromKeyword(BytesRef in) { String asString = in.utf8ToString(); try { @@ -84,17 +91,22 @@ static int fromKeyword(BytesRef in) { } } - @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) + @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) static int fromDouble(double dbl) { - return fromLong(safeDoubleToLong(dbl)); + return safeToInt(dbl); } - @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) - static int fromUnsignedLong(long lng) { - return fromLong(ToLong.fromUnsignedLong(lng)); + @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class }) + static int fromUnsignedLong(long ul) { + Number n = unsignedLongAsNumber(ul); + int i = n.intValue(); + if (i != n.longValue()) { + throw new InvalidArgumentException("[{}] out of [integer] range", n); + } + return i; } - @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) + @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) static int fromLong(long lng) { return safeToInt(lng); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index b66ad4f359607..0a2546297f038 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; @@ -29,6 +31,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; @@ -41,12 +44,20 @@ public class ToLong extends AbstractConvertFunction { Map.entry(CARTESIAN_POINT, (fieldEval, source) -> fieldEval), Map.entry(BOOLEAN, ToLongFromBooleanEvaluator.Factory::new), Map.entry(KEYWORD, ToLongFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToLongFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new), Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd ); - public ToLong(Source source, Expression field) { + @FunctionInfo(returnType = "long") + public ToLong( + Source source, + @Param( + name = "v", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer", "geo_point", "cartesian_point" } + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index 9f39015a8e063..a1d2e1381109d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -37,7 +39,8 @@ public class ToRadians extends AbstractConvertFunction implements EvaluatorMappe ) ); - public ToRadians(Source source, Expression field) { + @FunctionInfo(returnType = "double") + public ToRadians(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", "integer" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index a37b2becc8595..41d8f87aee436 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import 
org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -55,6 +56,7 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new) ); + @FunctionInfo(returnType = "keyword") public ToString( Source source, @Param( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 1b7ee01e50c54..cfa24cd6d8ff8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; @@ -26,6 +28,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; @@ -38,12 +41,17 @@ public class ToUnsignedLong extends AbstractConvertFunction { Map.entry(DATETIME, 
ToUnsignedLongFromLongEvaluator.Factory::new), Map.entry(BOOLEAN, ToUnsignedLongFromBooleanEvaluator.Factory::new), Map.entry(KEYWORD, ToUnsignedLongFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToUnsignedLongFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToUnsignedLongFromDoubleEvaluator.Factory::new), Map.entry(LONG, ToUnsignedLongFromLongEvaluator.Factory::new), Map.entry(INTEGER, ToUnsignedLongFromIntEvaluator.Factory::new) ); - public ToUnsignedLong(Source source, Expression field) { + @FunctionInfo(returnType = "unsigned_long") + public ToUnsignedLong( + Source source, + @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } @@ -72,7 +80,7 @@ static long fromBoolean(boolean bool) { return bool ? ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) static long fromKeyword(BytesRef in) { String asString = in.utf8ToString(); return asLongUnsigned(safeToUnsignedLong(asString)); @@ -83,12 +91,12 @@ static long fromDouble(double dbl) { return asLongUnsigned(safeToUnsignedLong(dbl)); } - @ConvertEvaluator(extraName = "FromLong") + @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) static long fromLong(long lng) { return asLongUnsigned(safeToUnsignedLong(lng)); } - @ConvertEvaluator(extraName = "FromInt") + @ConvertEvaluator(extraName = "FromInt", warnExceptions = { InvalidArgumentException.class }) static long fromInt(int i) { return fromLong(i); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index 
ad7712f33d947..34e8f695b23c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -31,6 +32,7 @@ public class ToVersion extends AbstractConvertFunction { Map.entry(TEXT, ToVersionFromStringEvaluator.Factory::new) ); + @FunctionInfo(returnType = "version") public ToVersion(Source source, @Param(name = "v", type = { "keyword", "text", "version" }) Expression v) { super(source, v); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index ba63afd8f1e4b..03a385592ac63 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1292,8 +1292,7 @@ public void testRegexOnInt() { public void testUnsupportedTypesWithToString() { // DATE_PERIOD and TIME_DURATION types have been added, but not really patched through the engine; i.e. supported. 
- final String supportedTypes = "boolean, cartesian_point, datetime, double, geo_point, integer, ip, keyword, long, text, " - + "unsigned_long or version"; + final String supportedTypes = "boolean or cartesian_point or datetime or geo_point or ip or numeric or string or version"; verifyUnsupported( "row period = 1 year | eval to_string(period)", "line 1:28: argument of [to_string(period)] must be [" + supportedTypes + "], found value [period] type [date_period]" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 81f2fa98be8cc..f003170a7551d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -798,13 +798,70 @@ private static String typeErrorMessage(boolean includeOrdinal, List validTypes) { String named = NAMED_EXPECTED_TYPES.get(validTypes); if (named == null) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index c1e9494541636..faf10d499127a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import 
org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; @@ -219,19 +220,30 @@ public static void forUnaryInt( IntFunction expectedValue, int lowerBound, int upperBound, - List warnings + Function> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.INTEGER, intCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply(n.intValue()), - warnings + n -> expectedWarnings.apply(n.intValue()) ); } + public static void forUnaryInt( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + IntFunction expectedValue, + int lowerBound, + int upperBound, + List warnings + ) { + forUnaryInt(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#LONG}. */ @@ -242,19 +254,30 @@ public static void forUnaryLong( LongFunction expectedValue, long lowerBound, long upperBound, - List warnings + Function> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.LONG, longCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply(n.longValue()), - warnings + expectedWarnings ); } + public static void forUnaryLong( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + LongFunction expectedValue, + long lowerBound, + long upperBound, + List warnings + ) { + forUnaryLong(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#UNSIGNED_LONG}. 
*/ @@ -265,19 +288,30 @@ public static void forUnaryUnsignedLong( Function expectedValue, BigInteger lowerBound, BigInteger upperBound, - List warnings + Function> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.UNSIGNED_LONG, ulongCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply((BigInteger) n), - warnings + n -> expectedWarnings.apply((BigInteger) n) ); } + public static void forUnaryUnsignedLong( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + BigInteger lowerBound, + BigInteger upperBound, + List warnings + ) { + forUnaryUnsignedLong(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#DOUBLE}. */ @@ -289,15 +323,26 @@ public static void forUnaryDouble( double lowerBound, double upperBound, List warnings + ) { + forUnaryDouble(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + + public static void forUnaryDouble( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + DoubleFunction expectedValue, + double lowerBound, + double upperBound, + DoubleFunction> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.DOUBLE, doubleCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply(n.doubleValue()), - warnings + n -> expectedWarnings.apply(n.doubleValue()) ); } @@ -311,15 +356,7 @@ public static void forUnaryBoolean( Function expectedValue, List warnings ) { - unary( - suppliers, - expectedEvaluatorToString, - DataTypes.BOOLEAN, - booleanCases(), - expectedType, - v -> expectedValue.apply((Boolean) v), - warnings - ); + unary(suppliers, expectedEvaluatorToString, booleanCases(), expectedType, v -> expectedValue.apply((Boolean) v), warnings); } /** @@ -335,7 +372,6 
@@ public static void forUnaryDatetime( unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.DATETIME, dateCases(), expectedType, n -> expectedValue.apply(Instant.ofEpochMilli(n.longValue())), @@ -356,7 +392,6 @@ public static void forUnaryGeoPoint( unaryNumeric( suppliers, expectedEvaluatorToString, - EsqlDataTypes.GEO_POINT, geoPointCases(), expectedType, n -> expectedValue.apply(n.longValue()), @@ -377,7 +412,6 @@ public static void forUnaryCartesianPoint( unaryNumeric( suppliers, expectedEvaluatorToString, - EsqlDataTypes.CARTESIAN_POINT, cartesianPointCases(), expectedType, n -> expectedValue.apply(n.longValue()), @@ -395,15 +429,7 @@ public static void forUnaryIp( Function expectedValue, List warnings ) { - unary( - suppliers, - expectedEvaluatorToString, - DataTypes.IP, - ipCases(), - expectedType, - v -> expectedValue.apply((BytesRef) v), - warnings - ); + unary(suppliers, expectedEvaluatorToString, ipCases(), expectedType, v -> expectedValue.apply((BytesRef) v), warnings); } /** @@ -414,21 +440,30 @@ public static void forUnaryStrings( String expectedEvaluatorToString, DataType expectedType, Function expectedValue, - List warnings + Function> expectedWarnings ) { - for (DataType type : EsqlDataTypes.types().stream().filter(EsqlDataTypes::isString).toList()) { + for (DataType type : AbstractConvertFunction.STRING_TYPES) { unary( suppliers, expectedEvaluatorToString, - type, stringCases(type), expectedType, v -> expectedValue.apply((BytesRef) v), - warnings + v -> expectedWarnings.apply((BytesRef) v) ); } } + public static void forUnaryStrings( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + forUnaryStrings(suppliers, expectedEvaluatorToString, expectedType, expectedValue, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#VERSION}. 
*/ @@ -442,7 +477,6 @@ public static void forUnaryVersion( unary( suppliers, expectedEvaluatorToString, - DataTypes.VERSION, versionCases(""), expectedType, v -> expectedValue.apply(new Version((BytesRef) v)), @@ -453,31 +487,39 @@ public static void forUnaryVersion( private static void unaryNumeric( List suppliers, String expectedEvaluatorToString, - DataType inputType, List valueSuppliers, DataType expectedOutputType, - Function expected, - List warnings + Function expectedValue, + Function> expectedWarnings ) { unary( suppliers, expectedEvaluatorToString, - inputType, valueSuppliers, expectedOutputType, - v -> expected.apply((Number) v), - warnings + v -> expectedValue.apply((Number) v), + v -> expectedWarnings.apply((Number) v) ); } - private static void unary( + private static void unaryNumeric( List suppliers, String expectedEvaluatorToString, - DataType inputType, List valueSuppliers, DataType expectedOutputType, - Function expected, + Function expected, List warnings + ) { + unaryNumeric(suppliers, expectedEvaluatorToString, valueSuppliers, expectedOutputType, expected, unused -> warnings); + } + + public static void unary( + List suppliers, + String expectedEvaluatorToString, + List valueSuppliers, + DataType expectedOutputType, + Function expectedValue, + Function> expectedWarnings ) { for (TypedDataSupplier supplier : valueSuppliers) { suppliers.add(new TestCaseSupplier(supplier.name(), List.of(supplier.type()), () -> { @@ -492,17 +534,29 @@ private static void unary( List.of(typed), expectedEvaluatorToString, expectedOutputType, - equalTo(expected.apply(value)) + equalTo(expectedValue.apply(value)) ); - for (String warning : warnings) { + for (String warning : expectedWarnings.apply(value)) { testCase = testCase.withWarning(warning); } return testCase; })); } + + } + + public static void unary( + List suppliers, + String expectedEvaluatorToString, + List valueSuppliers, + DataType expectedOutputType, + Function expected, + List warnings + ) { + 
unary(suppliers, expectedEvaluatorToString, valueSuppliers, expectedOutputType, expected, unused -> warnings); } - private static List intCases(int min, int max) { + public static List intCases(int min, int max) { List cases = new ArrayList<>(); if (0 <= max && 0 >= min) { cases.add(new TypedDataSupplier("<0 int>", () -> 0, DataTypes.INTEGER)); @@ -526,7 +580,7 @@ private static List intCases(int min, int max) { return cases; } - private static List longCases(long min, long max) { + public static List longCases(long min, long max) { List cases = new ArrayList<>(); if (0L <= max && 0L >= min) { cases.add(new TypedDataSupplier("<0 long>", () -> 0L, DataTypes.LONG)); @@ -551,7 +605,7 @@ private static List longCases(long min, long max) { return cases; } - private static List ulongCases(BigInteger min, BigInteger max) { + public static List ulongCases(BigInteger min, BigInteger max) { List cases = new ArrayList<>(); // Zero @@ -591,7 +645,7 @@ private static List ulongCases(BigInteger min, BigInteger max return cases; } - private static List doubleCases(double min, double max) { + public static List doubleCases(double min, double max) { List cases = new ArrayList<>(); // Zeros diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java new file mode 100644 index 0000000000000..b00cecd3f4ccc --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.function.Supplier; + +import static java.util.Collections.emptyList; + +public class ToBooleanTests extends AbstractFunctionTestCase { + public ToBooleanTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + final String read = "Attribute[channel=0]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryBoolean(suppliers, read, DataTypes.BOOLEAN, b -> b, emptyList()); + + TestCaseSupplier.forUnaryInt( + suppliers, + "ToBooleanFromIntEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + i -> i != 0, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + emptyList() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + "ToBooleanFromLongEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + l -> l != 0, + Long.MIN_VALUE, + Long.MAX_VALUE, + emptyList() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToBooleanFromUnsignedLongEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + ul -> ul.compareTo(BigInteger.ZERO) != 0, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + emptyList() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToBooleanFromDoubleEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + d -> d != 0d, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + emptyList() + ); 
+ TestCaseSupplier.forUnaryStrings( + suppliers, + "ToBooleanFromStringEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + bytesRef -> String.valueOf(bytesRef).toLowerCase(Locale.ROOT).equals("true"), + emptyList() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToBoolean(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java new file mode 100644 index 0000000000000..c92c8712d1697 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static java.util.Collections.emptyList; + +public class ToDatetimeTests extends AbstractFunctionTestCase { + public ToDatetimeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + final String read = "Attribute[channel=0]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.DATETIME, Instant::toEpochMilli, emptyList()); + + TestCaseSupplier.forUnaryInt( + suppliers, + "ToLongFromIntEvaluator[field=" + read + "]", + DataTypes.DATETIME, + i -> ((Integer) i).longValue(), + Integer.MIN_VALUE, + Integer.MAX_VALUE, + emptyList() + ); + TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.DATETIME, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, emptyList()); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToLongFromUnsignedLongEvaluator[field=" + read + "]", + DataTypes.DATETIME, + BigInteger::longValueExact, + BigInteger.ZERO, + BigInteger.valueOf(Long.MAX_VALUE), + emptyList() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToLongFromUnsignedLongEvaluator[field=" + read + "]", + 
DataTypes.DATETIME, + bi -> null, + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TWO), + UNSIGNED_LONG_MAX, + bi -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + bi + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToLongFromDoubleEvaluator[field=" + read + "]", + DataTypes.DATETIME, + d -> null, + Double.NEGATIVE_INFINITY, + -9.223372036854777E18, // a "convenient" value smaller than `(double) Long.MIN_VALUE` (== ...776E18) + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToLongFromDoubleEvaluator[field=" + read + "]", + DataTypes.DATETIME, + d -> null, + 9.223372036854777E18, // a "convenient" value larger than `(double) Long.MAX_VALUE` (== ...776E18) + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryStrings( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + DataTypes.DATETIME, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: " + + (bytesRef.utf8ToString().isEmpty() + ? 
"cannot parse empty date" + : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']")) + ) + ); + TestCaseSupplier.unary( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + // millis past "0001-01-01T00:00:00.000Z" to match the default formatter + () -> new BytesRef(Instant.ofEpochMilli(randomLongBetween(-62135596800000L, Long.MAX_VALUE)).toString()), + DataTypes.KEYWORD + ) + ), + DataTypes.DATETIME, + bytesRef -> DateParse.DEFAULT_FORMATTER.parseMillis(((BytesRef) bytesRef).utf8ToString()), + emptyList() + ); + TestCaseSupplier.unary( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + // millis before "0001-01-01T00:00:00.000Z" + () -> new BytesRef(Instant.ofEpochMilli(randomLongBetween(Long.MIN_VALUE, -62135596800001L)).toString()), + DataTypes.KEYWORD + ) + ), + DataTypes.DATETIME, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + + ((BytesRef) bytesRef).utf8ToString() + + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToDatetime(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java new file mode 100644 index 0000000000000..a1c3c1f38aac5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToDegreesTests extends AbstractFunctionTestCase { + public ToDegreesTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + Function evaluatorName = eval -> "ToDegreesEvaluator[field=" + eval + "[field=Attribute[channel=0]]]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("ToDoubleFromIntEvaluator"), + DataTypes.DOUBLE, + Math::toDegrees, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("ToDoubleFromLongEvaluator"), + DataTypes.DOUBLE, + Math::toDegrees, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("ToDoubleFromUnsignedLongEvaluator"), + DataTypes.DOUBLE, + ul -> Math.toDegrees(ul.doubleValue()), + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToDegreesEvaluator[field=Attribute[channel=0]]", + DataTypes.DOUBLE, + Math::toDegrees, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); + + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToDegrees(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java new file mode 100644 index 0000000000000..ebcaf367b1226 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToDoubleTests extends AbstractFunctionTestCase { + public ToDoubleTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function 
evaluatorName = s -> "ToDoubleFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryDouble( + suppliers, + read, + DataTypes.DOUBLE, + d -> d, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); + + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.DOUBLE, b -> b ? 1d : 0d, List.of()); + TestCaseSupplier.forUnaryDatetime( + suppliers, + evaluatorName.apply("Long"), + DataTypes.DOUBLE, + i -> (double) i.toEpochMilli(), + List.of() + ); + // random strings that don't look like a double + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("String"), + DataTypes.DOUBLE, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: " + + (bytesRef.utf8ToString().isEmpty() ? "empty String" : ("For input string: \"" + bytesRef.utf8ToString() + "\"")) + ) + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.DOUBLE, + BigInteger::doubleValue, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.DOUBLE, + l -> (double) l, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.DOUBLE, + i -> (double) i, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + + // strings of random numbers + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.castToDoubleSuppliersFromRange(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.DOUBLE, + bytesRef -> 
Double.valueOf(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToDouble(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java index 33a85f593ee6f..4294144e1cefe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java @@ -17,16 +17,14 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.stringCases; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; -import static org.hamcrest.Matchers.equalTo; public class ToIPTests extends AbstractFunctionTestCase { public ToIPTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -42,33 +40,27 @@ public static Iterable parameters() { // convert from IP to IP TestCaseSupplier.forUnaryIp(suppliers, read, DataTypes.IP, v -> v, List.of()); - // convert any kind of string to IP, with warnings. 
- for (TestCaseSupplier.TypedDataSupplier supplier : stringCases(DataTypes.KEYWORD)) { - suppliers.add(new TestCaseSupplier(supplier.name(), List.of(supplier.type()), () -> { - BytesRef value = (BytesRef) supplier.supplier().get(); - TestCaseSupplier.TypedData typed = new TestCaseSupplier.TypedData(value, supplier.type(), "value"); - TestCaseSupplier.TestCase testCase = new TestCaseSupplier.TestCase( - List.of(typed), - stringEvaluator, - DataTypes.IP, - equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: '" + value.utf8ToString() + "' is not an IP string literal." - ); - return testCase; - })); - } + // convert random string (i.e. not an IP representation) to IP `null`, with warnings. + TestCaseSupplier.forUnaryStrings( + suppliers, + stringEvaluator, + DataTypes.IP, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: '" + bytesRef.utf8ToString() + "' is not an IP string literal." 
+ ) + ); // convert valid IPs shaped as strings - DataType inputType = DataTypes.KEYWORD; - for (TestCaseSupplier.TypedDataSupplier ipGen : validIPsAsStrings()) { - suppliers.add(new TestCaseSupplier(ipGen.name(), List.of(inputType), () -> { - BytesRef ip = (BytesRef) ipGen.supplier().get(); - TestCaseSupplier.TypedData typed = new TestCaseSupplier.TypedData(ip, inputType, "value"); - return new TestCaseSupplier.TestCase(List.of(typed), stringEvaluator, DataTypes.IP, equalTo(parseIP(ip.utf8ToString()))); - })); - } + TestCaseSupplier.unary( + suppliers, + stringEvaluator, + validIPsAsStrings(), + DataTypes.IP, + bytesRef -> parseIP(((BytesRef) bytesRef).utf8ToString()), + emptyList() + ); // add null as parameter return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java new file mode 100644 index 0000000000000..4402c6d8529b4 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -0,0 +1,277 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; + +public class ToIntegerTests extends AbstractFunctionTestCase { + public ToIntegerTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function evaluatorName = s -> "ToIntegerFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryInt(suppliers, read, DataTypes.INTEGER, i -> i, Integer.MIN_VALUE, Integer.MAX_VALUE, List.of()); + + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.INTEGER, b -> b ? 
1 : 0, List.of()); + + // datetimes that fall within Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("Long"), + dateCases(0, Integer.MAX_VALUE), + DataTypes.INTEGER, + l -> ((Long) l).intValue(), + List.of() + ); + // datetimes that fall outside Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("Long"), + dateCases(Integer.MAX_VALUE + 1L, Long.MAX_VALUE), + DataTypes.INTEGER, + l -> null, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [integer] range" + ) + ); + // random strings that don't look like an Integer + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("String"), + DataTypes.INTEGER, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + ) + ); + // from doubles within Integer's range + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.INTEGER, + d -> safeToInt(Math.round(d)), + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + // from doubles outside Integer's range, negative + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.INTEGER, + d -> null, + Double.NEGATIVE_INFINITY, + Integer.MIN_VALUE - 1d, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [integer] range" + ) + ); + // from doubles outside Integer's range, positive + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.INTEGER, + d -> null, + Integer.MAX_VALUE + 1d, + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [integer] range" + ) + ); + + // from unsigned_long within Integer's range + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.INTEGER, + BigInteger::intValue, + BigInteger.ZERO, + BigInteger.valueOf(Integer.MAX_VALUE), + List.of() + ); + // from unsigned_long outside Integer's range + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.INTEGER, + ul -> null, + BigInteger.valueOf(Integer.MAX_VALUE).add(BigInteger.ONE), + UNSIGNED_LONG_MAX, + ul -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + ul + "] out of [integer] range" + + ) + ); + + // from long, within Integer's range + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.INTEGER, + l -> (int) l, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + // from long, outside Integer's range, negative + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.INTEGER, + l -> null, + Long.MIN_VALUE, + Integer.MIN_VALUE - 1L, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [integer] range" + + ) + ); + // from long, outside Integer's range, positive + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.INTEGER, + l -> null, + Integer.MAX_VALUE + 1L, + Long.MAX_VALUE, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [integer] range" + ) + ); + + // strings of random ints within Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.INTEGER, + bytesRef -> Integer.valueOf(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + // strings of random doubles within Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Integer.MIN_VALUE, Integer.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.INTEGER, + bytesRef -> safeToInt(Math.round(Double.parseDouble(((BytesRef) bytesRef).utf8ToString()))), + List.of() + ); + // strings of random doubles outside Integer's range, negative + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Double.NEGATIVE_INFINITY, Integer.MIN_VALUE - 1d) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + 
.toList(), + DataTypes.INTEGER, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + // strings of random doubles outside Integer's range, positive + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Integer.MAX_VALUE + 1d, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.INTEGER, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToInteger(source, args.get(0)); + } + + private static List dateCases(long min, long max) { + List dataSuppliers = new ArrayList<>(2); + if (min == 0L) { + dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataTypes.DATETIME)); + } + if (max <= Integer.MAX_VALUE) { + dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-25T20:31:23.647Z>", () -> 2147483647L, DataTypes.DATETIME)); + } + dataSuppliers.add( + new TestCaseSupplier.TypedDataSupplier("", () -> ESTestCase.randomLongBetween(min, max), DataTypes.DATETIME) + ); + return dataSuppliers; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java new file mode 100644 index 0000000000000..b153fa8489dee --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -0,0 +1,217 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToLongTests extends AbstractFunctionTestCase { + public ToLongTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function evaluatorName = s -> "ToLongFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.LONG, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, List.of()); + + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.LONG, b -> b ? 
1L : 0L, List.of()); + + // geo types + TestCaseSupplier.forUnaryGeoPoint(suppliers, read, DataTypes.LONG, i -> i, List.of()); + TestCaseSupplier.forUnaryCartesianPoint(suppliers, read, DataTypes.LONG, i -> i, List.of()); + // datetimes + TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.LONG, Instant::toEpochMilli, List.of()); + // random strings that don't look like a long + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("String"), + DataTypes.LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + ) + ); + // from doubles within long's range + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.LONG, + Math::round, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + // from doubles outside long's range, negative + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.LONG, + d -> null, + Double.NEGATIVE_INFINITY, + Long.MIN_VALUE - 1d, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + // from doubles outside long's range, positive + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.LONG, + d -> null, + Long.MAX_VALUE + 1d, + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + + // from unsigned_long within long's range + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.LONG, + BigInteger::longValue, + BigInteger.ZERO, + BigInteger.valueOf(Long.MAX_VALUE), + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.LONG, + ul -> null, + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), + UNSIGNED_LONG_MAX, + ul -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + ul + "] out of [long] range" + + ) + ); + + // from integer + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.LONG, + l -> (long) l, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + + // strings of random longs + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.longCases(Long.MIN_VALUE, Long.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> Long.valueOf(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + // strings of random doubles within long's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Long.MIN_VALUE, Long.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> Math.round(Double.parseDouble(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + // strings of 
random doubles outside integer's range, negative + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Double.NEGATIVE_INFINITY, Long.MIN_VALUE - 1d) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + // strings of random doubles outside integer's range, positive + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Long.MAX_VALUE + 1d, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToLong(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java new file mode 100644 index 0000000000000..ffd1a2734d75f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToRadiansTests extends AbstractFunctionTestCase { + public ToRadiansTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + Function evaluatorName = eval -> "ToRadiansEvaluator[field=" + eval + "[field=Attribute[channel=0]]]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("ToDoubleFromIntEvaluator"), + DataTypes.DOUBLE, + Math::toRadians, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("ToDoubleFromLongEvaluator"), + DataTypes.DOUBLE, + Math::toRadians, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("ToDoubleFromUnsignedLongEvaluator"), + DataTypes.DOUBLE, + ul -> Math.toRadians(ul.doubleValue()), + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToRadiansEvaluator[field=Attribute[channel=0]]", + DataTypes.DOUBLE, + Math::toRadians, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); + + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToRadians(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java new file mode 100644 index 0000000000000..080424602703d --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java @@ -0,0 +1,258 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; +import static 
org.elasticsearch.xpack.ql.util.NumericUtils.UNSIGNED_LONG_MAX_AS_DOUBLE; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; + +public class ToUnsignedLongTests extends AbstractFunctionTestCase { + public ToUnsignedLongTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function evaluatorName = s -> "ToUnsignedLongFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + read, + DataTypes.UNSIGNED_LONG, + NumericUtils::asLongUnsigned, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + + TestCaseSupplier.forUnaryBoolean( + suppliers, + evaluatorName.apply("Boolean"), + DataTypes.UNSIGNED_LONG, + b -> b ? ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG, + List.of() + ); + + // datetimes + TestCaseSupplier.forUnaryDatetime( + suppliers, + evaluatorName.apply("Long"), + DataTypes.UNSIGNED_LONG, + instant -> asLongUnsigned(instant.toEpochMilli()), + List.of() + ); + // random strings that don't look like an unsigned_long + TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataTypes.UNSIGNED_LONG, bytesRef -> null, bytesRef -> { + // BigDecimal, used to parse unsigned_longs will throw NFEs with different messages depending on empty string, first + // non-number character after a number-looking like prefix, or string starting with "e", maybe others -- safer to take + // this shortcut here. + Exception e = expectThrows(NumberFormatException.class, () -> new BigDecimal(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: " + e.getMessage() + ); + }); + // from doubles within unsigned_long's range + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.UNSIGNED_LONG, + d -> asLongUnsigned(BigDecimal.valueOf(d).toBigInteger()), // note: not: new BigDecimal(d).toBigInteger + 0d, + UNSIGNED_LONG_MAX_AS_DOUBLE, + List.of() + ); + // from doubles outside unsigned_long's range, negative + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.UNSIGNED_LONG, + d -> null, + Double.NEGATIVE_INFINITY, + -1d, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" + ) + ); + // from doubles outside Long's range, positive + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.UNSIGNED_LONG, + d -> null, + UNSIGNED_LONG_MAX_AS_DOUBLE + 10e5, + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" + ) + ); + + // from long within unsigned_long's range + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.UNSIGNED_LONG, + NumericUtils::asLongUnsigned, + 0L, + Long.MAX_VALUE, + List.of() + ); + // from long outside unsigned_long's range + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.UNSIGNED_LONG, + unused -> null, + Long.MIN_VALUE, + -1L, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" + ) + ); + + // from int within unsigned_long's range + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.UNSIGNED_LONG, + NumericUtils::asLongUnsigned, + 0, + Integer.MAX_VALUE, + List.of() + ); + // from int outside unsigned_long's range + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.UNSIGNED_LONG, + unused -> null, + Integer.MIN_VALUE, + -1, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" + ) + ); + + // strings of random unsigned_longs + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.ulongCases(BigInteger.ZERO, UNSIGNED_LONG_MAX) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> asLongUnsigned(safeToUnsignedLong(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + // strings of random doubles within unsigned_long's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(0, UNSIGNED_LONG_MAX_AS_DOUBLE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> asLongUnsigned(safeToUnsignedLong(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + // strings of random doubles outside unsigned_long's range, negative + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + 
TestCaseSupplier.doubleCases(Double.NEGATIVE_INFINITY, -1d) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + + ((BytesRef) bytesRef).utf8ToString() + + "] out of [unsigned_long] range" + ) + ); + // strings of random doubles outside Integer's range, positive + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(UNSIGNED_LONG_MAX_AS_DOUBLE + 10e5, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + + ((BytesRef) bytesRef).utf8ToString() + + "] out of [unsigned_long] range" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToUnsignedLong(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java index fefa397f7c77f..c6e2abae14443 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; @@ -24,8 +23,6 @@ import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - public class ToVersionTests extends AbstractFunctionTestCase { public ToVersionTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -37,9 +34,12 @@ public static Iterable parameters() { String read = "Attribute[channel=0]"; String stringEvaluator = "ToVersionFromStringEvaluator[field=" + read + "]"; List suppliers = new ArrayList<>(); + // Converting and IP to an IP doesn't change anything. Everything should succeed. 
- TestCaseSupplier.forUnaryVersion(suppliers, read, DataTypes.VERSION, v -> v.toBytesRef(), List.of()); - // None of the random strings ever look like versions so they should all become "invalid" versions + TestCaseSupplier.forUnaryVersion(suppliers, read, DataTypes.VERSION, Version::toBytesRef, List.of()); + + // None of the random strings ever look like versions so they should all become "invalid" versions: + // https://github.com/elastic/elasticsearch/issues/98989 // TODO should this return null with warnings? they aren't version shaped at all. TestCaseSupplier.forUnaryStrings( suppliers, @@ -48,20 +48,19 @@ public static Iterable parameters() { bytesRef -> new Version(bytesRef.utf8ToString()).toBytesRef(), List.of() ); + // But strings that are shaped like versions do parse to valid versions - for (DataType inputType : EsqlDataTypes.types().stream().filter(EsqlDataTypes::isString).toList()) { - for (TestCaseSupplier.TypedDataSupplier versionGen : TestCaseSupplier.versionCases(inputType.typeName() + " ")) { - suppliers.add(new TestCaseSupplier(versionGen.name(), List.of(inputType), () -> { - BytesRef encodedVersion = (BytesRef) versionGen.supplier().get(); - TestCaseSupplier.TypedData typed = new TestCaseSupplier.TypedData( - new BytesRef(new Version(encodedVersion).toString()), - inputType, - "value" - ); - return new TestCaseSupplier.TestCase(List.of(typed), stringEvaluator, DataTypes.VERSION, equalTo(encodedVersion)); - })); - } + for (DataType inputType : AbstractConvertFunction.STRING_TYPES) { + TestCaseSupplier.unary( + suppliers, + read, + TestCaseSupplier.versionCases(inputType.typeName() + " "), + DataTypes.VERSION, + bytesRef -> new Version((BytesRef) bytesRef).toBytesRef(), + List.of() + ); } + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java index bb7fa9cf8c03a..87f30a89577c2 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java @@ -382,6 +382,14 @@ public static int safeToInt(long x) { return (int) x; } + public static int safeToInt(double x) { + if (x > Integer.MAX_VALUE || x < Integer.MIN_VALUE) { + throw new InvalidArgumentException("[{}] out of [integer] range", x); + } + // cast is safe, double can represent all of int's range + return (int) Math.round(x); + } + public static long safeDoubleToLong(double x) { if (x > Long.MAX_VALUE || x < Long.MIN_VALUE) { throw new InvalidArgumentException("[{}] out of [long] range", x); From 237db902d20dd017d89bd2f9c9299b6190a12d12 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Mon, 4 Dec 2023 13:03:55 +0000 Subject: [PATCH 115/181] Update to 9.9.0 RC --- build.gradle | 5 ----- 1 file changed, 5 deletions(-) diff --git a/build.gradle b/build.gradle index d10f836db4024..c0b613beefea4 100644 --- a/build.gradle +++ b/build.gradle @@ -195,11 +195,6 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' - - repositories { - // TODO: Temporary for Lucene RC builds. 
REMOVE - maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC2-rev-06070c0dceba07f0d33104192d9ac98ca16fc500/lucene/maven" } - } } allprojects { From 84dad0279c728e2b0567e29cf84bb526cccc82bc Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Mon, 4 Dec 2023 08:09:59 -0500 Subject: [PATCH 116/181] [Query Rules] Fix bug where combining the same metadata with text/numeric values leads to error (#102891) * Fix issue where query rule criteria with matching metadata but different types returns error * Update docs/changelog/102891.yaml --- docs/changelog/102891.yaml | 7 ++++ .../test/entsearch/260_rule_query_search.yml | 42 +++++++++++++++++++ .../xpack/application/rules/QueryRule.java | 2 +- .../application/rules/QueryRuleCriteria.java | 9 +++- .../rules/QueryRuleCriteriaType.java | 9 +++- 5 files changed, 65 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/102891.yaml diff --git a/docs/changelog/102891.yaml b/docs/changelog/102891.yaml new file mode 100644 index 0000000000000..c5d5ed8c6758e --- /dev/null +++ b/docs/changelog/102891.yaml @@ -0,0 +1,7 @@ +pr: 102891 +summary: "[Query Rules] Fix bug where combining the same metadata with text/numeric\ + \ values leads to error" +area: Application +type: bug +issues: + - 102827 diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml index b41636e624674..c287209da5bed 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml @@ -194,4 +194,46 @@ setup: - match: { hits.hits.0._id: 'doc2' } - match: { hits.hits.1._id: 'doc3' } +--- +"Perform a rule query over a ruleset with combined numeric and text rule matching": 
+ + - do: + query_ruleset.put: + ruleset_id: combined-ruleset + body: + rules: + - rule_id: rule1 + type: pinned + criteria: + - type: fuzzy + metadata: foo + values: [ bar ] + actions: + ids: + - 'doc1' + - rule_id: rule2 + type: pinned + criteria: + - type: lte + metadata: foo + values: [ 100 ] + actions: + ids: + - 'doc2' + - do: + search: + body: + query: + rule_query: + organic: + query_string: + default_field: text + query: blah blah blah + match_criteria: + foo: baz + ruleset_id: combined-ruleset + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: 'doc1' } + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java index 9b2ce393e5b04..9cca42b0402bf 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java @@ -294,7 +294,7 @@ public AppliedQueryRules applyRule(AppliedQueryRules appliedRules, Map Date: Mon, 4 Dec 2023 14:27:54 +0100 Subject: [PATCH 117/181] Inference telemetry (#102877) * Empty infenrece usage wiring. 
* Add fake data * Fix NPE for secretSettings == null * Real inference model stats * New transport version * Code polish * Lint fixes * Update docs/changelog/102877.yaml * Update 102877.yaml * Add inference to yamlRestTest * Declare inference usage action as non-operator * TransportInferenceUsageActionTests * Lint fixes * Replace map by ToXContentObject/Writeable * Polish code * AbstractWireSerializingTestCase --------- Co-authored-by: Elastic Machine --- docs/changelog/102877.yaml | 5 + docs/reference/rest-api/usage.asciidoc | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../core/src/main/java/module-info.java | 1 + .../xpack/core/XPackClientPlugin.java | 3 + .../elasticsearch/xpack/core/XPackField.java | 2 + .../core/action/XPackUsageFeatureAction.java | 2 + .../inference/InferenceFeatureSetUsage.java | 116 +++++++++++++++++ .../InferenceFeatureSetUsageTests.java | 41 ++++++ .../xpack/inference/InferencePlugin.java | 5 +- .../action/TransportInferenceUsageAction.java | 81 ++++++++++++ .../TransportInferenceUsageActionTests.java | 121 ++++++++++++++++++ .../xpack/security/operator/Constants.java | 1 + 13 files changed, 383 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/102877.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsage.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java diff --git a/docs/changelog/102877.yaml b/docs/changelog/102877.yaml new file mode 100644 index 0000000000000..da2de19b19a90 --- /dev/null +++ b/docs/changelog/102877.yaml @@ -0,0 +1,5 @@ +pr: 102877 +summary: Add basic telelemetry for the inference 
feature +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 959a798378fc6..e2529de75f0e7 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -197,6 +197,11 @@ GET /_xpack/usage }, "node_count" : 1 }, + "inference": { + "available" : true, + "enabled" : true, + "models" : [] + }, "logstash" : { "available" : true, "enabled" : true diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index c392d3b6b4e29..57dc307a75841 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -184,6 +184,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_PROFILE = def(8_551_00_0); public static final TransportVersion CLUSTER_STATS_RESCORER_USAGE_ADDED = def(8_552_00_0); public static final TransportVersion ML_INFERENCE_HF_SERVICE_ADDED = def(8_553_00_0); + public static final TransportVersion INFERENCE_USAGE_ADDED = def(8_554_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index 4aa2e145228b8..f747d07224454 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -75,6 +75,7 @@ exports org.elasticsearch.xpack.core.indexing; exports org.elasticsearch.xpack.core.inference.action; exports org.elasticsearch.xpack.core.inference.results; + exports org.elasticsearch.xpack.core.inference; exports org.elasticsearch.xpack.core.logstash; exports org.elasticsearch.xpack.core.ml.action; exports org.elasticsearch.xpack.core.ml.annotations; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index ac16631bacb73..df19648307a0b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -55,6 +55,7 @@ import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.ilm.UnfollowAction; import org.elasticsearch.xpack.core.ilm.WaitForSnapshotAction; +import org.elasticsearch.xpack.core.inference.InferenceFeatureSetUsage; import org.elasticsearch.xpack.core.logstash.LogstashFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -133,6 +134,8 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.LOGSTASH, LogstashFeatureSetUsage::new), // ML new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.MACHINE_LEARNING, MachineLearningFeatureSetUsage::new), + // inference + new 
NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.INFERENCE, InferenceFeatureSetUsage::new), // monitoring new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.MONITORING, MonitoringFeatureSetUsage::new), // security diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index c8a78af429592..801ef2c463e95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -18,6 +18,8 @@ public final class XPackField { public static final String GRAPH = "graph"; /** Name constant for the machine learning feature. */ public static final String MACHINE_LEARNING = "ml"; + /** Name constant for the inference feature. */ + public static final String INFERENCE = "inference"; /** Name constant for the Logstash feature. */ public static final String LOGSTASH = "logstash"; /** Name constant for the Beats feature. 
*/ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index d96fd91ed3f22..c0e6d96c1569a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -27,6 +27,7 @@ public class XPackUsageFeatureAction extends ActionType modelStats; + + public InferenceFeatureSetUsage(Collection modelStats) { + super(XPackField.INFERENCE, true, true); + this.modelStats = modelStats; + } + + public InferenceFeatureSetUsage(StreamInput in) throws IOException { + super(in); + this.modelStats = in.readCollectionAsList(ModelStats::new); + } + + @Override + protected void innerXContent(XContentBuilder builder, Params params) throws IOException { + super.innerXContent(builder, params); + builder.xContentList("models", modelStats); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeCollection(modelStats); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.INFERENCE_USAGE_ADDED; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java new file mode 100644 index 0000000000000..8f64b521c64c9 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.inference; + +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; + +public class InferenceFeatureSetUsageTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return InferenceFeatureSetUsage.ModelStats::new; + } + + @Override + protected InferenceFeatureSetUsage.ModelStats createTestInstance() { + RandomStrings.randomAsciiLettersOfLength(random(), 10); + return new InferenceFeatureSetUsage.ModelStats( + randomIdentifier(), + TaskType.values()[randomInt(TaskType.values().length - 1)], + randomInt(10) + ); + } + + @Override + protected InferenceFeatureSetUsage.ModelStats mutateInstance(InferenceFeatureSetUsage.ModelStats modelStats) throws IOException { + InferenceFeatureSetUsage.ModelStats newModelStats = new InferenceFeatureSetUsage.ModelStats(modelStats); + newModelStats.add(); + return newModelStats; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 3adc63c9863cb..e08224aaffdd5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -32,6 +32,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import 
org.elasticsearch.xpack.core.inference.action.InferenceAction; @@ -39,6 +40,7 @@ import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportInferenceAction; +import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; @@ -86,7 +88,8 @@ public InferencePlugin(Settings settings) { new ActionHandler<>(InferenceAction.INSTANCE, TransportInferenceAction.class), new ActionHandler<>(GetInferenceModelAction.INSTANCE, TransportGetInferenceModelAction.class), new ActionHandler<>(PutInferenceModelAction.INSTANCE, TransportPutInferenceModelAction.class), - new ActionHandler<>(DeleteInferenceModelAction.INSTANCE, TransportDeleteInferenceModelAction.class) + new ActionHandler<>(DeleteInferenceModelAction.INSTANCE, TransportDeleteInferenceModelAction.class), + new ActionHandler<>(XPackUsageFeatureAction.INFERENCE, TransportInferenceUsageAction.class) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java new file mode 100644 index 0000000000000..54452d8a7ed68 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; +import org.elasticsearch.xpack.core.inference.InferenceFeatureSetUsage; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; + +import java.util.Map; +import java.util.TreeMap; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + +public class TransportInferenceUsageAction extends XPackUsageFeatureTransportAction { + + private final Client client; + + @Inject + public TransportInferenceUsageAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client + ) { + super( + XPackUsageFeatureAction.INFERENCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); + this.client = new OriginSettingClient(client, ML_ORIGIN); + } + + @Override + protected void masterOperation( + Task task, + XPackUsageRequest request, + 
ClusterState state, + ActionListener listener + ) throws Exception { + GetInferenceModelAction.Request getInferenceModelAction = new GetInferenceModelAction.Request("_all", TaskType.ANY); + client.execute(GetInferenceModelAction.INSTANCE, getInferenceModelAction, ActionListener.wrap(response -> { + Map stats = new TreeMap<>(); + for (ModelConfigurations model : response.getModels()) { + String statKey = model.getService() + ":" + model.getTaskType().name(); + InferenceFeatureSetUsage.ModelStats stat = stats.computeIfAbsent( + statKey, + key -> new InferenceFeatureSetUsage.ModelStats(model.getService(), model.getTaskType()) + ); + stat.add(); + } + InferenceFeatureSetUsage usage = new InferenceFeatureSetUsage(stats.values()); + listener.onResponse(new XPackUsageFeatureResponse(usage)); + }, listener::onFailure)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java new file mode 100644 index 0000000000000..b0c59fe160be3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockUtils; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.inference.InferenceFeatureSetUsage; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; +import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; +import org.junit.After; +import org.junit.Before; + +import java.util.List; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransportInferenceUsageActionTests extends ESTestCase { + + private Client 
client; + private TransportInferenceUsageAction action; + + @Before + public void init() { + client = mock(Client.class); + ThreadPool threadPool = new TestThreadPool("test"); + when(client.threadPool()).thenReturn(threadPool); + + TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(mock(ThreadPool.class)); + + action = new TransportInferenceUsageAction( + transportService, + mock(ClusterService.class), + mock(ThreadPool.class), + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + client + ); + } + + @After + public void close() { + client.threadPool().shutdown(); + } + + public void test() throws Exception { + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse( + new GetInferenceModelAction.Response( + List.of( + new ModelConfigurations("model-001", TaskType.TEXT_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-002", TaskType.TEXT_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-003", TaskType.SPARSE_EMBEDDING, "hugging_face_elser", mock(ServiceSettings.class)), + new ModelConfigurations("model-004", TaskType.TEXT_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-005", TaskType.SPARSE_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-006", TaskType.SPARSE_EMBEDDING, "hugging_face_elser", mock(ServiceSettings.class)) + ) + ) + ); + return Void.TYPE; + }).when(client).execute(any(GetInferenceModelAction.class), any(), any()); + + PlainActionFuture future = new PlainActionFuture<>(); + action.masterOperation(mock(Task.class), mock(XPackUsageRequest.class), mock(ClusterState.class), future); + + BytesStreamOutput out = new BytesStreamOutput(); + future.get().getUsage().writeTo(out); + XPackFeatureSet.Usage usage = new InferenceFeatureSetUsage(out.bytes().streamInput()); + + 
assertThat(usage.name(), is(XPackField.INFERENCE)); + assertTrue(usage.enabled()); + assertTrue(usage.available()); + + XContentBuilder builder = XContentFactory.jsonBuilder(); + usage.toXContent(builder, ToXContent.EMPTY_PARAMS); + XContentSource source = new XContentSource(builder); + assertThat(source.getValue("models"), hasSize(3)); + assertThat(source.getValue("models.0.service"), is("hugging_face_elser")); + assertThat(source.getValue("models.0.task_type"), is("SPARSE_EMBEDDING")); + assertThat(source.getValue("models.0.count"), is(2)); + assertThat(source.getValue("models.1.service"), is("openai")); + assertThat(source.getValue("models.1.task_type"), is("SPARSE_EMBEDDING")); + assertThat(source.getValue("models.1.count"), is(1)); + assertThat(source.getValue("models.2.service"), is("openai")); + assertThat(source.getValue("models.2.task_type"), is("TEXT_EMBEDDING")); + assertThat(source.getValue("models.2.count"), is(3)); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5412e7d05f27f..86640e2e1a784 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -416,6 +416,7 @@ public class Constants { "cluster:monitor/xpack/usage/graph", "cluster:monitor/xpack/usage/health_api", "cluster:monitor/xpack/usage/ilm", + "cluster:monitor/xpack/usage/inference", "cluster:monitor/xpack/usage/logstash", "cluster:monitor/xpack/usage/ml", "cluster:monitor/xpack/usage/monitoring", From 8be04463e4ae5795fc3fad45f2d01314eaf81035 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner 
<56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 4 Dec 2023 08:32:54 -0500 Subject: [PATCH 118/181] [ML] Fix text embedding response format for TransportCoordinatedInferenceAction (#102890) * Fix for response format * Adding tests --- .../inference/InferenceServiceResults.java | 11 +++++++++ .../results/SparseEmbeddingResults.java | 5 ++++ .../results/TextEmbeddingResults.java | 8 +++++++ .../results/SparseEmbeddingResultsTests.java | 21 ++++++++++++++++ .../results/TextEmbeddingResultsTests.java | 24 +++++++++++++++++++ .../TransportCoordinatedInferenceAction.java | 2 +- 6 files changed, 70 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java index 37990caeec097..ab5b74faa6530 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java @@ -16,6 +16,17 @@ public interface InferenceServiceResults extends NamedWriteable, ToXContentFragment { + /** + * Transform the result to match the format required for the TransportCoordinatedInferenceAction. + * For the inference plugin TextEmbeddingResults, the {@link #transformToLegacyFormat()} transforms the + * results into an intermediate format only used by the plugin's return value. It doesn't align with what the + * TransportCoordinatedInferenceAction expects. TransportCoordinatedInferenceAction expects an ml plugin + * TextEmbeddingResults. + * + * For other results like SparseEmbeddingResults, this method can be a pass through to the transformToLegacyFormat. 
+ */ + List transformToCoordinationFormat(); + /** * Transform the result to match the format required for versions prior to * {@link org.elasticsearch.TransportVersions#INFERENCE_SERVICE_RESULTS_ADDED} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java index 20279e82d6c09..910ea5cab214d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java @@ -81,6 +81,11 @@ public Map asMap() { return map; } + @Override + public List transformToCoordinationFormat() { + return transformToLegacyFormat(); + } + @Override public List transformToLegacyFormat() { return embeddings.stream() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java index 7a7ccab2b4daa..ace5974866038 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java @@ -78,6 +78,14 @@ public String getWriteableName() { return NAME; } + @Override + public List transformToCoordinationFormat() { + return embeddings.stream() + .map(embedding -> embedding.values.stream().mapToDouble(value -> value).toArray()) + .map(values -> new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults(TEXT_EMBEDDING, values, false)) + .toList(); + } + @Override @SuppressWarnings("deprecation") public List transformToLegacyFormat() { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java index 0a8bfd20caaf1..6f8fa0c453d09 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java @@ -11,12 +11,14 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; import static org.hamcrest.Matchers.is; public class SparseEmbeddingResultsTests extends AbstractWireSerializingTestCase { @@ -151,6 +153,25 @@ public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws I }""")); } + public void testTransformToCoordinationFormat() { + var results = createSparseResult( + List.of( + createEmbedding(List.of(new SparseEmbeddingResults.WeightedToken("token", 0.1F)), false), + createEmbedding(List.of(new SparseEmbeddingResults.WeightedToken("token2", 0.2F)), true) + ) + ).transformToCoordinationFormat(); + + assertThat( + results, + is( + List.of( + new TextExpansionResults(DEFAULT_RESULTS_FIELD, List.of(new TextExpansionResults.WeightedToken("token", 0.1F)), false), + new TextExpansionResults(DEFAULT_RESULTS_FIELD, List.of(new TextExpansionResults.WeightedToken("token2", 0.2F)), true) + ) + ) + ); + } + public record EmbeddingExpectation(Map tokens, boolean isTruncated) {} public static Map buildExpectation(List 
embeddings) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java index 71d14e09872fd..09d9894d98853 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java @@ -100,6 +100,30 @@ public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws I }""")); } + public void testTransformToCoordinationFormat() { + var results = new TextEmbeddingResults( + List.of(new TextEmbeddingResults.Embedding(List.of(0.1F, 0.2F)), new TextEmbeddingResults.Embedding(List.of(0.3F, 0.4F))) + ).transformToCoordinationFormat(); + + assertThat( + results, + is( + List.of( + new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( + TextEmbeddingResults.TEXT_EMBEDDING, + new double[] { 0.1F, 0.2F }, + false + ), + new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( + TextEmbeddingResults.TEXT_EMBEDDING, + new double[] { 0.3F, 0.4F }, + false + ) + ) + ) + ); + } + @Override protected Writeable.Reader instanceReader() { return TextEmbeddingResults::new; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java index d90c9ec807495..13e04772683eb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -182,7 +182,7 @@ private void replaceErrorOnMissing( } static InferModelAction.Response 
translateInferenceServiceResponse(InferenceServiceResults inferenceResults) { - var legacyResults = new ArrayList(inferenceResults.transformToLegacyFormat()); + var legacyResults = new ArrayList(inferenceResults.transformToCoordinationFormat()); return new InferModelAction.Response(legacyResults, null, false); } } From 5b7325b44393246e9852b49f41e08485ae4678cd Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 4 Dec 2023 09:07:42 -0500 Subject: [PATCH 119/181] Fix test failure #102868 (#102889) closes https://github.com/elastic/elasticsearch/issues/102868 --- .../ExceptionSerializationTests.java | 34 ++++++------------- 1 file changed, 10 insertions(+), 24 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 2263bfe78f218..f7362c7001c36 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CancellableThreadsTests; @@ -129,9 +128,8 @@ public class ExceptionSerializationTests extends ESTestCase { - public void testExceptionRegistration() throws ClassNotFoundException, IOException, URISyntaxException { + public void testExceptionRegistration() throws IOException, URISyntaxException { final Set> notRegistered = new HashSet<>(); - final Set> hasDedicatedWrite = new HashSet<>(); final Set> registered = new HashSet<>(); final String path = "/org/elasticsearch"; final Path startPath = 
PathUtils.get(ElasticsearchException.class.getProtectionDomain().getCodeSource().getLocation().toURI()) @@ -146,13 +144,13 @@ public void testExceptionRegistration() throws ClassNotFoundException, IOExcepti private Path pkgPrefix = PathUtils.get(path).getParent(); @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { pkgPrefix = pkgPrefix.resolve(dir.getFileName()); return FileVisitResult.CONTINUE; } @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { checkFile(file.getFileName().toString()); return FileVisitResult.CONTINUE; } @@ -180,13 +178,6 @@ private void checkClass(Class clazz) { notRegistered.add(clazz); } else if (ElasticsearchException.isRegistered(clazz.asSubclass(Throwable.class), TransportVersion.current())) { registered.add(clazz); - try { - if (clazz.getMethod("writeTo", StreamOutput.class) != null) { - hasDedicatedWrite.add(clazz); - } - } catch (Exception e) { - // fair enough - } } } @@ -199,7 +190,7 @@ private Class loadClass(String filename) throws ClassNotFoundException { for (Path p : pkgPrefix) { pkg.append(p.getFileName().toString()).append("."); } - pkg.append(filename.substring(0, filename.length() - 6)); + pkg.append(filename, 0, filename.length() - 6); return getClass().getClassLoader().loadClass(pkg.toString()); } @@ -209,7 +200,7 @@ public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOExce } @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + public FileVisitResult postVisitDirectory(Path dir, IOException exc) { pkgPrefix = pkgPrefix.getParent(); return FileVisitResult.CONTINUE; } @@ -220,7 +211,7 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOEx 
Files.walkFileTree(testStartPath, visitor); assertTrue(notRegistered.remove(TestException.class)); assertTrue(notRegistered.remove(UnknownHeaderException.class)); - assertTrue("Classes subclassing ElasticsearchException must be registered \n" + notRegistered.toString(), notRegistered.isEmpty()); + assertTrue("Classes subclassing ElasticsearchException must be registered \n" + notRegistered, notRegistered.isEmpty()); assertTrue(registered.removeAll(ElasticsearchException.getRegisteredKeys())); // check assertEquals(registered.toString(), 0, registered.size()); } @@ -344,7 +335,7 @@ public void testInvalidIndexTemplateException() throws IOException { assertEquals(ex.name(), "foo"); ex = serialize(new InvalidIndexTemplateException(null, "bar")); assertEquals(ex.getMessage(), "index_template [null] invalid, cause [bar]"); - assertEquals(ex.name(), null); + assertNull(ex.name()); } public void testActionTransportException() throws IOException { @@ -353,17 +344,12 @@ public void testActionTransportException() throws IOException { assertEquals("[name?][" + transportAddress + "][ACTION BABY!] 
message?", ex.getMessage()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102868") public void testSearchContextMissingException() throws IOException { ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong()); - TransportVersion version = TransportVersionUtils.randomVersion(random()); + TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); SearchContextMissingException ex = serialize(new SearchContextMissingException(contextId), version); assertThat(ex.contextId().getId(), equalTo(contextId.getId())); - if (version.onOrAfter(TransportVersions.V_7_7_0)) { - assertThat(ex.contextId().getSessionId(), equalTo(contextId.getSessionId())); - } else { - assertThat(ex.contextId().getSessionId(), equalTo("")); - } + assertThat(ex.contextId().getSessionId(), equalTo(contextId.getSessionId())); } public void testCircuitBreakingException() throws IOException { @@ -422,7 +408,7 @@ public void testConnectTransportException() throws IOException { } public void testSearchPhaseExecutionException() throws IOException { - ShardSearchFailure[] empty = new ShardSearchFailure[0]; + ShardSearchFailure[] empty = ShardSearchFailure.EMPTY_ARRAY; SearchPhaseExecutionException ex = serialize(new SearchPhaseExecutionException("boom", "baam", new NullPointerException(), empty)); assertEquals("boom", ex.getPhaseName()); assertEquals("baam", ex.getMessage()); From bba08fc97c2c7b783263b5cd6de2e75a8bf42871 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Mon, 4 Dec 2023 15:31:02 +0100 Subject: [PATCH 120/181] Renaming inference rescorer feature flag to learn to rank. 
(#102883) --- .../org/elasticsearch/test/cluster/FeatureFlag.java | 2 +- x-pack/plugin/ml/qa/basic-multi-node/build.gradle | 2 +- x-pack/plugin/ml/qa/ml-with-security/build.gradle | 4 ++-- .../org/elasticsearch/xpack/ml/MachineLearning.java | 9 ++++----- ...rerFeature.java => LearnToRankRescorerFeature.java} | 10 +++++----- .../org/elasticsearch/xpack/test/rest/XPackRestIT.java | 2 +- 6 files changed, 14 insertions(+), 15 deletions(-) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{InferenceRescorerFeature.java => LearnToRankRescorerFeature.java} (61%) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index b83cc7bba06e5..ff7195f9f5f37 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,7 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - INFERENCE_RESCORER("es.inference_rescorer_feature_flag_enabled=true", Version.fromString("8.10.0"), null), + LEARN_TO_RANK("es.learn_to_rank_feature_flag_enabled=true", Version.fromString("8.10.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); public final String systemProperty; diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index fca019a6fc689..bf6ab9ed7d77e 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -17,7 +17,7 @@ testClusters.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' setting 'slm.history_index_enabled', 'false' - requiresFeature 
'es.inference_rescorer_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index b28e6bec462b9..b8b706353d624 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -181,7 +181,7 @@ tasks.named("yamlRestTest").configure { 'ml/inference_crud/Test put nlp model config with vocabulary set', 'ml/inference_crud/Test put model model aliases with nlp model', 'ml/inference_processor/Test create processor with missing mandatory fields', - 'ml/inference_rescore/Test rescore with missing model', + 'ml/learn_to_rank_rescorer/Test rescore with missing model', 'ml/inference_stats_crud/Test get stats given missing trained model', 'ml/inference_stats_crud/Test get stats given expression without matches and allow_no_match is false', 'ml/jobs_crud/Test cannot create job with model snapshot id set', @@ -258,5 +258,5 @@ testClusters.configureEach { user username: "no_ml", password: "x-pack-test-password", role: "minimal" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' - requiresFeature 'es.inference_rescorer_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index db23e7796f862..d0f7302105768 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -324,8 +324,8 @@ import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import 
org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; -import org.elasticsearch.xpack.ml.inference.ltr.InferenceRescorerFeature; import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerBuilder; +import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerFeature; import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankService; import org.elasticsearch.xpack.ml.inference.modelsize.MlModelSizeNamedXContentProvider; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -886,8 +886,7 @@ private static void reportClashingNodeAttribute(String attrName) { @Override public List> getRescorers() { - if (enabled && InferenceRescorerFeature.isEnabled()) { - // Inference rescorer requires access to the model loading service + if (enabled && LearnToRankRescorerFeature.isEnabled()) { return List.of( new RescorerSpec<>( LearnToRankRescorerBuilder.NAME, @@ -1798,7 +1797,7 @@ public List getNamedXContent() { ); namedXContent.addAll(new CorrelationNamedContentProvider().getNamedXContentParsers()); // LTR Combine with Inference named content provider when feature flag is removed - if (InferenceRescorerFeature.isEnabled()) { + if (LearnToRankRescorerFeature.isEnabled()) { namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); } return namedXContent; @@ -1886,7 +1885,7 @@ public List getNamedWriteables() { namedWriteables.addAll(new CorrelationNamedContentProvider().getNamedWriteables()); namedWriteables.addAll(new ChangePointNamedContentProvider().getNamedWriteables()); // LTR Combine with Inference named content provider when feature flag is removed - if (InferenceRescorerFeature.isEnabled()) { + if (LearnToRankRescorerFeature.isEnabled()) { namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); } return namedWriteables; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/InferenceRescorerFeature.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java similarity index 61% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/InferenceRescorerFeature.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java index 8a26714c7c06b..18b2c6fe5ff3f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/InferenceRescorerFeature.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java @@ -10,19 +10,19 @@ import org.elasticsearch.common.util.FeatureFlag; /** - * Inference rescorer feature flag. When the feature is complete, this flag will be removed. + * Learn to rank feature flag. When the feature is complete, this flag will be removed. * * Upon removal, ensure transport serialization is all corrected for future BWC. 
* * See {@link LearnToRankRescorerBuilder} */ -public class InferenceRescorerFeature { +public class LearnToRankRescorerFeature { - private InferenceRescorerFeature() {} + private LearnToRankRescorerFeature() {} - private static final FeatureFlag INFERENCE_RESCORE_FEATURE_FLAG = new FeatureFlag("inference_rescorer"); + private static final FeatureFlag LEARN_TO_RANK = new FeatureFlag("learn_to_rank"); public static boolean isEnabled() { - return INFERENCE_RESCORE_FEATURE_FLAG.isEnabled(); + return LEARN_TO_RANK.isEnabled(); } } diff --git a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index a0e0fd621ba46..3fd8e952d626e 100644 --- a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -43,7 +43,7 @@ public class XPackRestIT extends AbstractXPackRestTest { .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") .user("x_pack_rest_user", "x-pack-test-password") .feature(FeatureFlag.TIME_SERIES_MODE) - .feature(FeatureFlag.INFERENCE_RESCORER) + .feature(FeatureFlag.LEARN_TO_RANK) .configFile("testnode.pem", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) From 3493ce4ebe75d1c44bd0eb01cf68ca568bae8674 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 4 Dec 2023 15:31:42 +0100 Subject: [PATCH 121/181] [Connector API] Implement update error action (#102841) --- .../api/connector.update_error.json | 39 ++++ .../entsearch/335_connector_update_error.yml | 60 ++++++ .../xpack/application/EnterpriseSearch.java | 5 + 
.../application/connector/Connector.java | 52 ++++- .../connector/ConnectorIndexService.java | 31 +++ .../RestUpdateConnectorErrorAction.java | 45 +++++ .../TransportUpdateConnectorErrorAction.java | 52 +++++ .../action/UpdateConnectorErrorAction.java | 186 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 43 ++++ ...ErrorActionRequestBWCSerializingTests.java | 50 +++++ ...rrorActionResponseBWCSerializingTests.java | 42 ++++ .../xpack/security/operator/Constants.java | 1 + 12 files changed, 600 insertions(+), 6 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json new file mode 100644 index 0000000000000..5d82a3729b501 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json @@ -0,0 +1,39 @@ +{ + "connector.update_error": { + "documentation": { + "url": 
"https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the error field in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_error", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + }, + "body": { + "description": "An object containing the connector's error.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml new file mode 100644 index 0000000000000..70021e3899525 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml @@ -0,0 +1,60 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Error": + - do: + connector.update_error: + connector_id: test-connector + body: + error: "some error" + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { error: "some error" } + +--- +"Update Connector Error - 404 when connector doesn't exist": + - do: + catch: "missing" + connector.update_error: + connector_id: test-non-existent-connector + body: + error: "some error" + +--- +"Update Connector Error - 400 status code when connector_id is empty": + - do: 
+ catch: "bad_request" + connector.update_error: + connector_id: "" + body: + error: "some error" + +--- +"Update Connector Error - 400 status code when payload is not string": + - do: + catch: "bad_request" + connector.update_error: + connector_id: test-connector + body: + error: + field_1: test + field_2: something diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 2a53a46760868..09b86988ffe81 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; @@ -59,11 +60,13 @@ import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import 
org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -201,6 +204,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), + new ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), @@ -267,6 +271,7 @@ public List getRestHandlers( new RestGetConnectorAction(), new RestListConnectorAction(), new RestPutConnectorAction(), + new RestUpdateConnectorErrorAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorLastSeenAction(), new RestUpdateConnectorLastSyncStatsAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 45b906d815aee..d68cc9f7227bc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -200,14 +200,14 @@ public Connector(StreamInput in) throws IOException { public static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); static final ParseField DESCRIPTION_FIELD = new ParseField("description"); - static final ParseField ERROR_FIELD = new ParseField("error"); + public static final ParseField ERROR_FIELD = new ParseField("error"); static final ParseField FEATURES_FIELD = new ParseField("features"); public static final ParseField FILTERING_FIELD = new ParseField("filtering"); public static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); public static final ParseField LANGUAGE_FIELD = new ParseField("language"); public static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); - static final ParseField NAME_FIELD = new ParseField("name"); + public static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); public static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); public static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); @@ -457,8 +457,28 @@ public String getConnectorId() { return connectorId; } - public ConnectorScheduling getScheduling() { - return scheduling; + public String getApiKeyId() { + return apiKeyId; + } + + public Map getConfiguration() { + return configuration; + } + + public Map getCustomScheduling() { + return customScheduling; + } + + public String 
getDescription() { + return description; + } + + public String getError() { + return error; + } + + public ConnectorFeatures getFeatures() { + return features; } public List getFiltering() { @@ -469,20 +489,40 @@ public String getIndexName() { return indexName; } + public boolean isNative() { + return isNative; + } + public String getLanguage() { return language; } + public String getName() { + return name; + } + public ConnectorIngestPipeline getPipeline() { return pipeline; } + public ConnectorScheduling getScheduling() { + return scheduling; + } + public String getServiceType() { return serviceType; } - public Map getConfiguration() { - return configuration; + public ConnectorStatus getStatus() { + return status; + } + + public Object getSyncCursor() { + return syncCursor; + } + + public boolean isSyncNow() { + return syncNow; } public ConnectorSyncInfo getSyncInfo() { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index d99ad28dc3970..744a4d2028990 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -323,6 +324,36 @@ public void 
updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } } + /** + * Updates the error property of a {@link Connector}. + * + * @param request The request for updating the connector's error. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorError(UpdateConnectorErrorAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java new file mode 100644 index 0000000000000..ea8bd1b4ee50f --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorErrorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_error_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_error")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorErrorAction.Request request = UpdateConnectorErrorAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorErrorAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorErrorAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java new file mode 100644 index 0000000000000..629fd14861cf6 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorErrorAction extends HandledTransportAction< + UpdateConnectorErrorAction.Request, + UpdateConnectorErrorAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorErrorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorErrorAction.NAME, + transportService, + actionFilters, + UpdateConnectorErrorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorErrorAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorError(request, listener.map(r -> new UpdateConnectorErrorAction.Response(r.getResult()))); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java new file mode 100644 index 0000000000000..c9e48dac08cd5 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java @@ -0,0 +1,186 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class 
UpdateConnectorErrorAction extends ActionType { + + public static final UpdateConnectorErrorAction INSTANCE = new UpdateConnectorErrorAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_error"; + + public UpdateConnectorErrorAction() { + super(NAME, UpdateConnectorErrorAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + @Nullable + private final String error; + + public Request(String connectorId, String error) { + this.connectorId = connectorId; + this.error = error; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.error = in.readOptionalString(); + } + + public String getConnectorId() { + return connectorId; + } + + public String getError() { + return error; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_update_error_request", + false, + ((args, connectorId) -> new UpdateConnectorErrorAction.Request(connectorId, (String) args[0])) + ); + + static { + PARSER.declareStringOrNull(optionalConstructorArg(), Connector.ERROR_FIELD); + } + + public static UpdateConnectorErrorAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorErrorAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), 
e); + } + } + + public static UpdateConnectorErrorAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.ERROR_FIELD.getPreferredName(), error); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalString(error); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(error, request.error); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, error); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, 
that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index e155cdfefbfa1..0f2c6c3fa3e8e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -172,6 +173,23 @@ public void testUpdateConnectorScheduling() throws Exception { assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling())); } + public void testUpdateConnectorError() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request( + connector.getConnectorId(), + randomAlphaOfLengthBetween(5, 15) + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorError(updateErrorRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = 
awaitGetConnector(connector.getConnectorId()); + assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); + } + private DeleteResponse awaitDeleteConnector(String connectorId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -399,4 +417,29 @@ public void onFailure(Exception e) { assertNotNull("Received null response from update scheduling request", resp.get()); return resp.get(); } + + private UpdateResponse awaitUpdateConnectorError(UpdateConnectorErrorAction.Request updatedError) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorError(updatedError, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update error request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update error request", resp.get()); + return resp.get(); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..94092cee61b40 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorErrorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorErrorAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorErrorAction.Request::new; + } + + @Override + protected UpdateConnectorErrorAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorErrorAction.Request(connectorId, randomAlphaOfLengthBetween(5, 15)); + } + + @Override + protected UpdateConnectorErrorAction.Request mutateInstance(UpdateConnectorErrorAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorErrorAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorErrorAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorErrorAction.Request mutateInstanceForVersion( + UpdateConnectorErrorAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java new file mode 100644 index 
0000000000000..a39fcac3d2f04 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorErrorActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorErrorAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorErrorAction.Response::new; + } + + @Override + protected UpdateConnectorErrorAction.Response createTestInstance() { + return new UpdateConnectorErrorAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorErrorAction.Response mutateInstance(UpdateConnectorErrorAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorErrorAction.Response mutateInstanceForVersion( + UpdateConnectorErrorAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 86640e2e1a784..ffc894af423cf 100644 --- 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,6 +127,7 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", + "cluster:admin/xpack/connector/update_error", "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", "cluster:admin/xpack/connector/update_last_sync_stats", From fd1e26a4bb3ba4a466fa614f200aa5a57b32b1d4 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 4 Dec 2023 15:37:52 +0100 Subject: [PATCH 122/181] [Enterprise Search] Add GET connector sync job by id (#102908) Add GET connector sync job by id. --- .../api/connector_sync_job.get.json | 32 +++ .../entsearch/440_connector_sync_job_get.yml | 36 +++ .../xpack/application/EnterpriseSearch.java | 5 + .../connector/syncjob/ConnectorSyncJob.java | 271 ++++++++++++++++-- .../syncjob/ConnectorSyncJobIndexService.java | 36 ++- .../action/GetConnectorSyncJobAction.java | 153 ++++++++++ .../action/RestGetConnectorSyncJobAction.java | 42 +++ .../TransportGetConnectorSyncJobAction.java | 55 ++++ .../ConnectorSyncJobIndexServiceTests.java | 112 +++++--- .../syncjob/ConnectorSyncJobTestUtils.java | 9 + .../syncjob/ConnectorSyncJobTests.java | 207 +++++++++++++ ...ncJobActionRequestBWCSerializingTests.java | 47 +++ ...cJobActionResponseBWCSerializingTests.java | 50 ++++ .../GetConnectorSyncJobActionTests.java | 36 +++ ...ansportGetConnectorSyncJobActionTests.java | 75 +++++ .../xpack/security/operator/Constants.java | 1 + 16 files changed, 1103 insertions(+), 64 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json create mode 100644 
x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json new file mode 100644 index 0000000000000..6eb461ad62128 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json @@ -0,0 +1,32 @@ +{ + "connector_sync_job.get": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Returns the details about a connector sync job." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}", + "methods": [ + "GET" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to be returned." + } + } + } + ] + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml new file mode 100644 index 0000000000000..ade0736436e87 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml @@ -0,0 +1,36 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +'Get connector sync job': + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: access_control + trigger_method: scheduled + - set: { id: id } + - match: { id: $id } + - do: + connector_sync_job.get: + connector_sync_job_id: $id + - match: { job_type: access_control } + - match: { trigger_method: scheduled } + +--- +'Get connector sync job - Missing sync job id': + - do: + connector_sync_job.get: + connector_sync_job_id: non-existing-sync-job-id + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 09b86988ffe81..f93177666f3d8 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -75,14 +75,17 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; @@ -212,6 +215,7 @@ protected XPackLicenseState getLicenseState() { new 
ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), // SyncJob API + new ActionHandler<>(GetConnectorSyncJobAction.INSTANCE, TransportGetConnectorSyncJobAction.class), new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class), new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), @@ -279,6 +283,7 @@ public List getRestHandlers( new RestUpdateConnectorSchedulingAction(), // SyncJob API + new RestGetConnectorSyncJobAction(), new RestPostConnectorSyncJobAction(), new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 6c0e9635d986d..2a302ddb68199 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -7,22 +7,36 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorFiltering; +import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import java.io.IOException; import java.time.Instant; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * Represents a sync job in the Elasticsearch ecosystem. Sync jobs refer to a unit of work, which syncs data from a 3rd party * data source into an Elasticsearch index using the Connectors service. 
A ConnectorSyncJob always refers @@ -60,7 +74,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField CREATED_AT_FIELD = new ParseField("created_at"); - static final ParseField DELETED_DOCUMENT_COUNT = new ParseField("deleted_document_count"); + static final ParseField DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); static final ParseField ERROR_FIELD = new ParseField("error"); @@ -92,6 +106,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ConnectorSyncJobTriggerMethod DEFAULT_TRIGGER_METHOD = ConnectorSyncJobTriggerMethod.ON_DEMAND; + @Nullable private final Instant cancelationRequestedAt; @Nullable @@ -127,7 +142,6 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { private final ConnectorSyncStatus status; - @Nullable private final long totalDocumentCount; private final ConnectorSyncJobTriggerMethod triggerMethod; @@ -217,44 +231,269 @@ public ConnectorSyncJob(StreamInput in) throws IOException { this.workerHostname = in.readOptionalString(); } + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_sync_job", + true, + (args) -> { + int i = 0; + return new Builder().setCancellationRequestedAt((Instant) args[i++]) + .setCanceledAt((Instant) args[i++]) + .setCompletedAt((Instant) args[i++]) + .setConnector((Connector) args[i++]) + .setCreatedAt((Instant) args[i++]) + .setDeletedDocumentCount((Long) args[i++]) + .setError((String) args[i++]) + .setId((String) args[i++]) + .setIndexedDocumentCount((Long) args[i++]) + .setIndexedDocumentVolume((Long) args[i++]) + .setJobType((ConnectorSyncJobType) args[i++]) + .setLastSeen((Instant) args[i++]) + .setMetadata((Map) args[i++]) + .setStartedAt((Instant) args[i++]) + .setStatus((ConnectorSyncStatus) args[i++]) + .setTotalDocumentCount((Long) args[i++]) + .setTriggerMethod((ConnectorSyncJobTriggerMethod) args[i++]) 
+ .setWorkerHostname((String) args[i]) + .build(); + } + ); + + static { + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + CANCELATION_REQUESTED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField(optionalConstructorArg(), (p, c) -> Instant.parse(p.text()), CANCELED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), (p, c) -> Instant.parse(p.text()), COMPLETED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncJob.syncJobConnectorFromXContent(p), + CONNECTOR_FIELD, + ObjectParser.ValueType.OBJECT + ); + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), CREATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareLong(constructorArg(), DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), ERROR_FIELD); + PARSER.declareString(constructorArg(), ID_FIELD); + PARSER.declareLong(constructorArg(), INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareLong(constructorArg(), INDEXED_DOCUMENT_VOLUME_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncJobType.fromString(p.text()), + JOB_TYPE_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), LAST_SEEN_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(constructorArg(), (p, c) -> p.map(), METADATA_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField(optionalConstructorArg(), (p, c) -> Instant.parse(p.text()), STARTED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncStatus.fromString(p.text()), + STATUS_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareLong(constructorArg(), TOTAL_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncJobTriggerMethod.fromString(p.text()), + TRIGGER_METHOD_FIELD, + 
ObjectParser.ValueType.STRING + ); + PARSER.declareString(optionalConstructorArg(), WORKER_HOSTNAME_FIELD); + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser SYNC_JOB_CONNECTOR_PARSER = new ConstructingObjectParser<>( + "sync_job_connector", + true, + (args) -> { + int i = 0; + return new Connector.Builder().setConnectorId((String) args[i++]) + .setFiltering((List) args[i++]) + .setIndexName((String) args[i++]) + .setLanguage((String) args[i++]) + .setPipeline((ConnectorIngestPipeline) args[i++]) + .setServiceType((String) args[i++]) + .setConfiguration((Map) args[i++]) + .build(); + } + ); + + static { + SYNC_JOB_CONNECTOR_PARSER.declareString(constructorArg(), Connector.ID_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareObjectArray( + optionalConstructorArg(), + (p, c) -> ConnectorFiltering.fromXContent(p), + Connector.FILTERING_FIELD + ); + SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.INDEX_NAME_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.LANGUAGE_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorIngestPipeline.fromXContent(p), + Connector.PIPELINE_FIELD, + ObjectParser.ValueType.OBJECT + ); + SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.SERVICE_TYPE_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parser.map(), + Connector.CONFIGURATION_FIELD, + ObjectParser.ValueType.OBJECT + ); + } + + public static ConnectorSyncJob fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorSyncJob.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector sync job document.", e); + } + } + + public static ConnectorSyncJob 
fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public static Connector syncJobConnectorFromXContent(XContentParser parser) throws IOException { + return SYNC_JOB_CONNECTOR_PARSER.parse(parser, null); + } + public String getId() { return id; } + public Instant getCancelationRequestedAt() { + return cancelationRequestedAt; + } + + public Instant getCanceledAt() { + return canceledAt; + } + + public Instant getCompletedAt() { + return completedAt; + } + + public Connector getConnector() { + return connector; + } + + public Instant getCreatedAt() { + return createdAt; + } + + public long getDeletedDocumentCount() { + return deletedDocumentCount; + } + + public String getError() { + return error; + } + + public long getIndexedDocumentCount() { + return indexedDocumentCount; + } + + public long getIndexedDocumentVolume() { + return indexedDocumentVolume; + } + + public ConnectorSyncJobType getJobType() { + return jobType; + } + + public Instant getLastSeen() { + return lastSeen; + } + + public Map getMetadata() { + return metadata; + } + + public Instant getStartedAt() { + return startedAt; + } + + public ConnectorSyncStatus getStatus() { + return status; + } + + public long getTotalDocumentCount() { + return totalDocumentCount; + } + + public ConnectorSyncJobTriggerMethod getTriggerMethod() { + return triggerMethod; + } + + public String getWorkerHostname() { + return workerHostname; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), cancelationRequestedAt); - builder.field(CANCELED_AT_FIELD.getPreferredName(), canceledAt); - builder.field(COMPLETED_AT_FIELD.getPreferredName(), completedAt); + if (cancelationRequestedAt != null) { + builder.field(CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), cancelationRequestedAt); + } + if (canceledAt != null) { + 
builder.field(CANCELED_AT_FIELD.getPreferredName(), canceledAt); + } + if (completedAt != null) { + builder.field(COMPLETED_AT_FIELD.getPreferredName(), completedAt); + } builder.startObject(CONNECTOR_FIELD.getPreferredName()); { builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId()); - builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getFiltering()); - builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); - builder.field(Connector.LANGUAGE_FIELD.getPreferredName(), connector.getLanguage()); - builder.field(Connector.PIPELINE_FIELD.getPreferredName(), connector.getPipeline()); - builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connector.getServiceType()); - builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), connector.getConfiguration()); + if (connector.getFiltering() != null) { + builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getFiltering()); + } + if (connector.getIndexName() != null) { + builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); + } + if (connector.getLanguage() != null) { + builder.field(Connector.LANGUAGE_FIELD.getPreferredName(), connector.getLanguage()); + } + if (connector.getPipeline() != null) { + builder.field(Connector.PIPELINE_FIELD.getPreferredName(), connector.getPipeline()); + } + if (connector.getServiceType() != null) { + builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connector.getServiceType()); + } + if (connector.getConfiguration() != null) { + builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), connector.getConfiguration()); + } } builder.endObject(); builder.field(CREATED_AT_FIELD.getPreferredName(), createdAt); - builder.field(DELETED_DOCUMENT_COUNT.getPreferredName(), deletedDocumentCount); - builder.field(ERROR_FIELD.getPreferredName(), error); + builder.field(DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), deletedDocumentCount); + if 
(error != null) { + builder.field(ERROR_FIELD.getPreferredName(), error); + } builder.field(ID_FIELD.getPreferredName(), id); builder.field(INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), indexedDocumentCount); builder.field(INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), indexedDocumentVolume); builder.field(JOB_TYPE_FIELD.getPreferredName(), jobType); - builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + if (lastSeen != null) { + builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + } builder.field(METADATA_FIELD.getPreferredName(), metadata); - builder.field(STARTED_AT_FIELD.getPreferredName(), startedAt); + if (startedAt != null) { + builder.field(STARTED_AT_FIELD.getPreferredName(), startedAt); + } builder.field(STATUS_FIELD.getPreferredName(), status); builder.field(TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), totalDocumentCount); builder.field(TRIGGER_METHOD_FIELD.getPreferredName(), triggerMethod); - builder.field(WORKER_HOSTNAME_FIELD.getPreferredName(), workerHostname); + if (workerHostname != null) { + builder.field(WORKER_HOSTNAME_FIELD.getPreferredName(), workerHostname); + } } builder.endObject(); return builder; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index ab593fe99fcee..5e1686dde80f2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentType; import 
org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; @@ -174,6 +175,40 @@ public void checkInConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { + final GetRequest getRequest = new GetRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).id(connectorSyncJobId).realtime(true); + + try { + clientWithOrigin.get( + getRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, getResponse) -> { + if (getResponse.isExists() == false) { + l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + + try { + final ConnectorSyncJob syncJob = ConnectorSyncJob.fromXContentBytes( + getResponse.getSourceAsBytesRef(), + XContentType.JSON + ); + l.onResponse(syncJob); + } catch (Exception e) { + listener.onFailure(e); + } + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Cancels the {@link ConnectorSyncJob} in the underlying index. * Canceling means to set the {@link ConnectorSyncStatus} to "canceling" and not "canceled" as this is an async operation. 
@@ -211,7 +246,6 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener { + + public static final GetConnectorSyncJobAction INSTANCE = new GetConnectorSyncJobAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/get"; + + private GetConnectorSyncJobAction() { + super(NAME, GetConnectorSyncJobAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + private final String connectorSyncJobId; + + private static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorSyncJobId = in.readString(); + } + + public Request(String connectorSyncJobId) { + this.connectorSyncJobId = connectorSyncJobId; + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError( + ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, + validationException + ); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJobId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorSyncJobId); + builder.endObject(); + return builder; + } + + 
private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_connector_sync_job_request", + false, + (args) -> new Request((String) args[0]) + ); + + static { + PARSER.declareString(constructorArg(), CONNECTOR_ID_FIELD); + } + + public static Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + private final ConnectorSyncJob connectorSyncJob; + + public Response(ConnectorSyncJob connectorSyncJob) { + this.connectorSyncJob = connectorSyncJob; + } + + public Response(StreamInput in) throws IOException { + super(in); + this.connectorSyncJob = new ConnectorSyncJob(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + connectorSyncJob.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return connectorSyncJob.toXContent(builder, params); + } + + public static GetConnectorSyncJobAction.Response fromXContent(XContentParser parser) throws IOException { + return new GetConnectorSyncJobAction.Response(ConnectorSyncJob.fromXContent(parser)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(connectorSyncJob, response.connectorSyncJob); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJob); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java new file mode 100644 index 0000000000000..1f5606810757e --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; + +public class RestGetConnectorSyncJobAction extends BaseRestHandler { + @Override + public String getName() { + return "connector_sync_job_get_action"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.GET, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + GetConnectorSyncJobAction.Request request = new GetConnectorSyncJobAction.Request(restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM)); + return restChannel -> client.execute(GetConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java 
new file mode 100644 index 0000000000000..1024b9953fd09 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportGetConnectorSyncJobAction extends HandledTransportAction< + GetConnectorSyncJobAction.Request, + GetConnectorSyncJobAction.Response> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportGetConnectorSyncJobAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + GetConnectorSyncJobAction.NAME, + transportService, + actionFilters, + GetConnectorSyncJobAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute( + Task task, + GetConnectorSyncJobAction.Request request, + ActionListener listener + ) { + connectorSyncJobIndexService.getConnectorSyncJob( + 
request.getConnectorSyncJobId(), + listener.map(GetConnectorSyncJobAction.Response::new) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index cadc8b761cbe3..8613078e3074e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -80,46 +80,21 @@ public void testCreateConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connector.getConnectorId() ); - PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); - - String id = (String) connectorSyncJobSource.get(ConnectorSyncJob.ID_FIELD.getPreferredName()); - ConnectorSyncJobType requestJobType = syncJobRequest.getJobType(); - ConnectorSyncJobType jobType = ConnectorSyncJobType.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.JOB_TYPE_FIELD.getPreferredName()) - ); - ConnectorSyncJobTriggerMethod requestTriggerMethod = syncJobRequest.getTriggerMethod(); - ConnectorSyncJobTriggerMethod triggerMethod = ConnectorSyncJobTriggerMethod.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.TRIGGER_METHOD_FIELD.getPreferredName()) - ); - - ConnectorSyncStatus initialStatus = ConnectorSyncStatus.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) - ); - - Instant createdNow = Instant.parse((String) 
connectorSyncJobSource.get(ConnectorSyncJob.CREATED_AT_FIELD.getPreferredName())); - Instant lastSeen = Instant.parse((String) connectorSyncJobSource.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName())); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Integer totalDocumentCount = (Integer) connectorSyncJobSource.get(ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName()); - Integer indexedDocumentCount = (Integer) connectorSyncJobSource.get( - ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName() - ); - Integer indexedDocumentVolume = (Integer) connectorSyncJobSource.get( - ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName() - ); - Integer deletedDocumentCount = (Integer) connectorSyncJobSource.get(ConnectorSyncJob.DELETED_DOCUMENT_COUNT.getPreferredName()); - - assertThat(id, notNullValue()); - assertThat(jobType, equalTo(requestJobType)); - assertThat(triggerMethod, equalTo(requestTriggerMethod)); - assertThat(initialStatus, equalTo(ConnectorSyncJob.DEFAULT_INITIAL_STATUS)); - assertThat(createdNow, equalTo(lastSeen)); - assertThat(totalDocumentCount, equalTo(0)); - assertThat(indexedDocumentCount, equalTo(0)); - assertThat(indexedDocumentVolume, equalTo(0)); - assertThat(deletedDocumentCount, equalTo(0)); + ConnectorSyncJob connectorSyncJob = awaitGetConnectorSyncJob(response.getId()); + + assertThat(connectorSyncJob.getId(), notNullValue()); + assertThat(connectorSyncJob.getJobType(), equalTo(requestJobType)); + assertThat(connectorSyncJob.getTriggerMethod(), equalTo(requestTriggerMethod)); + assertThat(connectorSyncJob.getStatus(), equalTo(ConnectorSyncJob.DEFAULT_INITIAL_STATUS)); + assertThat(connectorSyncJob.getCreatedAt(), equalTo(connectorSyncJob.getLastSeen())); + assertThat(connectorSyncJob.getTotalDocumentCount(), equalTo(0L)); + assertThat(connectorSyncJob.getIndexedDocumentCount(), equalTo(0L)); + assertThat(connectorSyncJob.getIndexedDocumentVolume(), equalTo(0L)); + 
assertThat(connectorSyncJob.getDeletedDocumentCount(), equalTo(0L)); } public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeToBeSet() throws Exception { @@ -130,12 +105,9 @@ public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeTo ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); - ConnectorSyncJobType jobType = ConnectorSyncJobType.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.JOB_TYPE_FIELD.getPreferredName()) - ); + ConnectorSyncJob connectorSyncJob = awaitGetConnectorSyncJob(response.getId()); - assertThat(jobType, equalTo(ConnectorSyncJob.DEFAULT_JOB_TYPE)); + assertThat(connectorSyncJob.getJobType(), equalTo(ConnectorSyncJob.DEFAULT_JOB_TYPE)); } public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTriggerMethodToBeSet() throws Exception { @@ -146,12 +118,9 @@ public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTri ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); - ConnectorSyncJobTriggerMethod triggerMethod = ConnectorSyncJobTriggerMethod.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.TRIGGER_METHOD_FIELD.getPreferredName()) - ); + ConnectorSyncJob connectorSyncJob = awaitGetConnectorSyncJob(response.getId()); - assertThat(triggerMethod, equalTo(ConnectorSyncJob.DEFAULT_TRIGGER_METHOD)); + assertThat(connectorSyncJob.getTriggerMethod(), equalTo(ConnectorSyncJob.DEFAULT_TRIGGER_METHOD)); } public void testCreateConnectorSyncJob_WithMissingConnectorId_ExpectException() throws Exception { @@ -184,6 +153,28 @@ public void testDeleteConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> 
awaitDeleteConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testGetConnectorSyncJob() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + ConnectorSyncJobType jobType = syncJobRequest.getJobType(); + ConnectorSyncJobTriggerMethod triggerMethod = syncJobRequest.getTriggerMethod(); + + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + + ConnectorSyncJob syncJob = awaitGetConnectorSyncJob(syncJobId); + + assertThat(syncJob.getId(), equalTo(syncJobId)); + assertThat(syncJob.getJobType(), equalTo(jobType)); + assertThat(syncJob.getTriggerMethod(), equalTo(triggerMethod)); + assertThat(syncJob.getConnector().getConnectorId(), equalTo(connector.getConnectorId())); + } + + public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); + } + public void testCheckInConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connector.getConnectorId() @@ -346,6 +337,33 @@ private Map getConnectorSyncJobSourceById(String syncJobId) thro return getResponseActionFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getSource(); } + private ConnectorSyncJob awaitGetConnectorSyncJob(String connectorSyncJobId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + + connectorSyncJobIndexService.getConnectorSyncJob(connectorSyncJobId, new ActionListener() { + @Override + public void onResponse(ConnectorSyncJob connectorSyncJob) { + resp.set(connectorSyncJob); + latch.countDown(); + } + + @Override + public void 
onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for get request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from get request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitCheckInConnectorSyncJob(String connectorSyncJobId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 4fa1b9122284d..9ec404e109496 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import java.time.Instant; @@ -100,4 +101,12 @@ public static CancelConnectorSyncJobAction.Request getRandomCancelConnectorSyncJ public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyncJobActionRequest() { return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } + + public static GetConnectorSyncJobAction.Request getRandomGetConnectorSyncJobRequest() { + return 
new GetConnectorSyncJobAction.Request(randomAlphaOfLength(10)); + } + + public static GetConnectorSyncJobAction.Response getRandomGetConnectorSyncJobResponse() { + return new GetConnectorSyncJobAction.Response(getRandomConnectorSyncJob()); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index aeecf582c9ec7..ace1138b8e987 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -7,15 +7,23 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.List; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; public class ConnectorSyncJobTests extends ESTestCase { @@ -35,6 +43,205 @@ public final void testRandomSerialization() throws IOException { } } + public void testFromXContent_WithAllFields_AllSet() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "cancelation_requested_at": "2023-12-01T14:19:39.394194Z", + "canceled_at": "2023-12-01T14:19:39.394194Z", + "completed_at": 
"2023-12-01T14:19:39.394194Z", + "connector": { + "connector_id": "connector-id", + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ], + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + }, + "created_at": "2023-12-01T14:18:43.07693Z", + "deleted_document_count": 10, + "error": "some-error", + "id": "HIC-JYwB9RqKhB7x_hIE", + "indexed_document_count": 10, + "indexed_document_volume": 10, + "job_type": "full", + "last_seen": "2023-12-01T14:18:43.07693Z", + "metadata": {}, + "started_at": "2023-12-01T14:18:43.07693Z", + "status": "canceling", + "total_document_count": 0, + "trigger_method": "scheduled", + "worker_hostname": "worker-hostname" + } + """); + + ConnectorSyncJob syncJob = ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); + + assertThat(syncJob.getCancelationRequestedAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); + 
assertThat(syncJob.getCanceledAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); + assertThat(syncJob.getCompletedAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); + + assertThat(syncJob.getConnector().getConnectorId(), equalTo("connector-id")); + assertThat(syncJob.getConnector().getFiltering(), hasSize(greaterThan(0))); + assertThat(syncJob.getConnector().getIndexName(), equalTo("search-connector")); + assertThat(syncJob.getConnector().getLanguage(), equalTo("english")); + assertThat(syncJob.getConnector().getPipeline(), notNullValue()); + + assertThat(syncJob.getCreatedAt(), equalTo(Instant.parse("2023-12-01T14:18:43.07693Z"))); + assertThat(syncJob.getDeletedDocumentCount(), equalTo(10L)); + assertThat(syncJob.getError(), equalTo("some-error")); + assertThat(syncJob.getId(), equalTo("HIC-JYwB9RqKhB7x_hIE")); + assertThat(syncJob.getIndexedDocumentCount(), equalTo(10L)); + assertThat(syncJob.getIndexedDocumentVolume(), equalTo(10L)); + assertThat(syncJob.getJobType(), equalTo(ConnectorSyncJobType.FULL)); + assertThat(syncJob.getLastSeen(), equalTo(Instant.parse("2023-12-01T14:18:43.07693Z"))); + assertThat(syncJob.getMetadata(), notNullValue()); + assertThat(syncJob.getStartedAt(), equalTo(Instant.parse("2023-12-01T14:18:43.07693Z"))); + assertThat(syncJob.getStatus(), equalTo(ConnectorSyncStatus.CANCELING)); + assertThat(syncJob.getTotalDocumentCount(), equalTo(0L)); + assertThat(syncJob.getTriggerMethod(), equalTo(ConnectorSyncJobTriggerMethod.SCHEDULED)); + assertThat(syncJob.getWorkerHostname(), equalTo("worker-hostname")); + } + + public void testFromXContent_WithAllNonOptionalFieldsSet_DoesNotThrow() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "connector": { + "connector_id": "connector-id", + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": 
"2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ], + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + }, + "created_at": "2023-12-01T14:18:43.07693Z", + "deleted_document_count": 10, + "id": "HIC-JYwB9RqKhB7x_hIE", + "indexed_document_count": 10, + "indexed_document_volume": 10, + "job_type": "full", + "last_seen": "2023-12-01T14:18:43.07693Z", + "metadata": {}, + "status": "canceling", + "total_document_count": 0, + "trigger_method": "scheduled" + } + """); + + ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); + } + private void assertTransportSerialization(ConnectorSyncJob testInstance) throws IOException { ConnectorSyncJob deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java new 
file mode 100644 index 0000000000000..c0b7711474a0b --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class GetConnectorSyncJobActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + GetConnectorSyncJobAction.Request> { + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorSyncJobAction.Request::new; + } + + @Override + protected GetConnectorSyncJobAction.Request createTestInstance() { + return ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobRequest(); + } + + @Override + protected GetConnectorSyncJobAction.Request mutateInstance(GetConnectorSyncJobAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorSyncJobAction.Request doParseInstance(XContentParser parser) throws IOException { + return GetConnectorSyncJobAction.Request.parse(parser); + } + + @Override + protected GetConnectorSyncJobAction.Request mutateInstanceForVersion( + GetConnectorSyncJobAction.Request instance, + TransportVersion version + ) { + return new GetConnectorSyncJobAction.Request(instance.getConnectorSyncJobId()); + } +} diff 
--git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..00f6e7cf57fc1 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.List; + +public class GetConnectorSyncJobActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + GetConnectorSyncJobAction.Response> { + + @Override + public NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new))); + } + + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorSyncJobAction.Response::new; + } + + @Override + protected GetConnectorSyncJobAction.Response createTestInstance() { + return 
ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobResponse(); + } + + @Override + protected GetConnectorSyncJobAction.Response mutateInstance(GetConnectorSyncJobAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorSyncJobAction.Response mutateInstanceForVersion( + GetConnectorSyncJobAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..807f02124f32a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class GetConnectorSyncJobActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSyncJobIdIsPresent_ExpectNoValidationError() { + GetConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { + GetConnectorSyncJobAction.Request requestWithMissingConnectorId = new GetConnectorSyncJobAction.Request(""); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..7b83d008d92bc --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportGetConnectorSyncJobActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportGetConnectorSyncJobAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportGetConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testGetConnectorSyncJob_ExpectNoWarnings() throws 
InterruptedException { + GetConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(GetConnectorSyncJobAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for get request", requestTimedOut); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index ffc894af423cf..3409f549cb579 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -136,6 +136,7 @@ public class Constants { "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/check_in", + "cluster:admin/xpack/connector/sync_job/get", "cluster:admin/xpack/connector/sync_job/cancel", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", From af30fe437ba6ba2f3540aa12249220c9d43cbdfb Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 4 Dec 2023 14:39:19 +0000 Subject: [PATCH 123/181] Check for null before overriding task settings (#102918) --- .../embeddings/OpenAiEmbeddingsModel.java | 5 ++++- .../OpenAiEmbeddingsModelTests.java | 19 +++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 5e2c352d88a01..02c1e41e0374a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -84,8 +84,11 @@ public ExecutableAction accept(OpenAiActionVisitor creator, Map } public OpenAiEmbeddingsModel overrideWith(Map taskSettings) { - var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(taskSettings); + if (taskSettings == null || taskSettings.isEmpty()) { + return this; + } + var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(taskSettings); return new OpenAiEmbeddingsModel(this, getTaskSettings().overrideWith(requestTaskSettings)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index 96ced66723f04..62cb609a59d2a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -14,8 +14,11 @@ import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import java.util.Map; + import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; 
import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; public class OpenAiEmbeddingsModelTests extends ESTestCase { @@ -28,6 +31,22 @@ public void testOverrideWith_OverridesUser() { assertThat(overriddenModel, is(createModel("url", "org", "api_key", "model_name", "user_override"))); } + public void testOverrideWith_EmptyMap() { + var model = createModel("url", "org", "api_key", "model_name", null); + + var requestTaskSettingsMap = Map.of(); + + var overriddenModel = model.overrideWith(requestTaskSettingsMap); + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap() { + var model = createModel("url", "org", "api_key", "model_name", null); + + var overriddenModel = model.overrideWith(null); + assertThat(overriddenModel, sameInstance(model)); + } + public static OpenAiEmbeddingsModel createModel( String url, @Nullable String org, From 2eff970cdb6c066b4b8b25f8694b8b40a124c580 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Mon, 4 Dec 2023 16:38:53 +0100 Subject: [PATCH 124/181] [Profiling] Improve tests and fix arm64 handling (#102859) Co-authored-by: Elastic Machine --- .../xpack/profiling/GetStackTracesRequest.java | 2 +- .../xpack/profiling/GetStackTracesRequestTests.java | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java index a6680f08f4684..3ab797e4b16ad 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java @@ -89,7 +89,7 @@ public GetStackTracesRequest( this.customCO2PerKWH = customCO2PerKWH; this.customDatacenterPUE = customDatacenterPUE; this.customPerCoreWattX86 = customPerCoreWattX86; - 
this.customPerCoreWattARM64 = customPerCoreWattX86; + this.customPerCoreWattARM64 = customPerCoreWattARM64; this.customCostPerCoreHour = customCostPerCoreHour; } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java index bb4973e75eec8..5b6befbe5a2c2 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java @@ -57,7 +57,13 @@ public void testSerialization() throws IOException { try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry())) { GetStackTracesRequest deserialized = new GetStackTracesRequest(in); assertEquals(sampleSize, deserialized.getSampleSize()); + assertEquals(requestedDuration, deserialized.getRequestedDuration()); assertEquals(awsCostFactor, deserialized.getAwsCostFactor()); + assertEquals(customCO2PerKWH, deserialized.getCustomCO2PerKWH()); + assertEquals(datacenterPUE, deserialized.getCustomDatacenterPUE()); + assertEquals(perCoreWattX86, deserialized.getCustomPerCoreWattX86()); + assertEquals(perCoreWattARM64, deserialized.getCustomPerCoreWattARM64()); + assertEquals(customCostPerCoreHour, deserialized.getCustomCostPerCoreHour()); assertEquals(query, deserialized.getQuery()); } } From aaadd11b2da7aa7c4f3ca092f42cb8576f63b56f Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 4 Dec 2023 17:19:00 +0100 Subject: [PATCH 125/181] Fix CI error in DenseVectorFieldMapperTests (#102914) --- .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index eca673a6dcf24..c417ec995a20a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -790,7 +790,6 @@ public void testMaxDimsByteVector() throws IOException { assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102899") public void testVectorSimilarity() { assertEquals( VectorSimilarityFunction.COSINE, @@ -799,7 +798,11 @@ public void testVectorSimilarity() { assertEquals( VectorSimilarityFunction.COSINE, VectorSimilarity.COSINE.vectorSimilarityFunction( - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, DenseVectorFieldMapper.NORMALIZE_COSINE), + IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_8_0_0, + IndexVersionUtils.getPreviousVersion(DenseVectorFieldMapper.NORMALIZE_COSINE) + ), ElementType.FLOAT ) ); From e173b2e6a11cbe83c600e8f90764e5af5f194eda Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 4 Dec 2023 17:43:45 +0100 Subject: [PATCH 126/181] Introduce a StreamOutput that counts how many bytes are written to the stream (#102906) Currently, the `DelayableWriteable` defines a `CountingStreamOutput` that is private to that class. The spatial module would benefit to use a similar implementation to write doc values. Therefore in this PR we propose to move this class to be a first class implementation of the StreamOutput family. This new implementation specialises a few more methods so it should perform better and it is fully tested. 
--- docs/changelog/102906.yaml | 6 + .../common/geo/SimpleFeatureFactory.java | 9 +- .../io/stream/CountingStreamOutput.java | 88 +++++++++ .../common/io/stream/DelayableWriteable.java | 22 +-- .../common/io/stream/BytesStreamsTests.java | 179 ++++++++++++++---- .../index/fielddata/TriangleTreeWriter.java | 80 ++++---- 6 files changed, 280 insertions(+), 104 deletions(-) create mode 100644 docs/changelog/102906.yaml create mode 100644 server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java diff --git a/docs/changelog/102906.yaml b/docs/changelog/102906.yaml new file mode 100644 index 0000000000000..3efaa2db58390 --- /dev/null +++ b/docs/changelog/102906.yaml @@ -0,0 +1,6 @@ +pr: 102906 +summary: Introduce a `StreamOutput` that counts how many bytes are written to the + stream +area: Distributed +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java b/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java index aafef3f04ebc0..a5a8c2d4ed736 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java +++ b/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BitUtil; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.CountingStreamOutput; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; @@ -178,16 +179,14 @@ private static int encodeCommand(int id, int length) { } private static byte[] writeCommands(final int[] commands, final int type, final int length) throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput()) { + try (BytesStreamOutput output = new BytesStreamOutput(); CountingStreamOutput counting = new CountingStreamOutput()) { for (int i = 0; i < length; i++) { - output.writeVInt(commands[i]); + 
counting.writeVInt(commands[i]); } - final int dataSize = output.size(); - output.reset(); output.writeVInt(24); output.writeVInt(type); output.writeVInt(34); - output.writeVInt(dataSize); + output.writeVInt(Math.toIntExact(counting.size())); for (int i = 0; i < length; i++) { output.writeVInt(commands[i]); } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java new file mode 100644 index 0000000000000..cc4416444c9ee --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.io.stream; + +import java.io.IOException; + +/** + * A reusable @link {@link StreamOutput} that just count how many bytes are written. 
+ */ +public class CountingStreamOutput extends StreamOutput { + private long size; + + /** reset the written byes to 0 */ + public void reset() { + size = 0L; + } + + /** returns how many bytes would have been written */ + public long size() { + return size; + } + + @Override + public void writeByte(byte b) { + ++size; + } + + @Override + public void writeBytes(byte[] b, int offset, int length) { + size += length; + } + + @Override + public void writeInt(int i) { + size += Integer.BYTES; + } + + @Override + public void writeIntArray(int[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Integer.BYTES; + } + + @Override + public void writeLong(long i) { + size += Long.BYTES; + } + + @Override + public void writeLongArray(long[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Long.BYTES; + } + + @Override + public void writeFloat(float v) { + size += Float.BYTES; + } + + @Override + public void writeFloatArray(float[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Float.BYTES; + } + + @Override + public void writeDouble(double v) { + size += Double.BYTES; + } + + @Override + public void writeDoubleArray(double[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Double.BYTES; + } + + @Override + public void flush() {} + + @Override + public void close() {} +} diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java b/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java index d24a42ef3fcef..4b3683edf7307 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java @@ -216,7 +216,7 @@ public static long getSerializedSize(Writeable ref) { try (CountingStreamOutput out = new CountingStreamOutput()) { 
out.setTransportVersion(TransportVersion.current()); ref.writeTo(out); - return out.size; + return out.size(); } catch (IOException exc) { throw new UncheckedIOException(exc); } @@ -237,24 +237,4 @@ private static T deserialize( return reader.read(in); } } - - private static class CountingStreamOutput extends StreamOutput { - long size = 0; - - @Override - public void writeByte(byte b) throws IOException { - ++size; - } - - @Override - public void writeBytes(byte[] b, int offset, int length) throws IOException { - size += length; - } - - @Override - public void flush() throws IOException {} - - @Override - public void close() throws IOException {} - } } diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java index 92e8abbe83e16..0e54a9a49aa00 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java @@ -53,7 +53,7 @@ */ public class BytesStreamsTests extends ESTestCase { public void testEmpty() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); // test empty stream to array assertEquals(0, out.size()); @@ -63,7 +63,7 @@ public void testEmpty() throws Exception { } public void testSingleByte() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); assertEquals(0, out.size()); int expectedSize = 1; @@ -78,7 +78,7 @@ public void testSingleByte() throws Exception { } public void testSingleShortPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = 10; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -95,7 +95,7 @@ public void testSingleShortPage() throws Exception { } public void 
testIllegalBulkWrite() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); // bulk-write with wrong args expectThrows(IndexOutOfBoundsException.class, () -> out.writeBytes(new byte[] {}, 0, 1)); @@ -103,7 +103,7 @@ public void testIllegalBulkWrite() throws Exception { } public void testSingleShortPageBulkWrite() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); // first bulk-write empty array: should not change anything int expectedSize = 0; @@ -123,7 +123,7 @@ public void testSingleShortPageBulkWrite() throws Exception { } public void testSingleFullPageBulkWrite() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = PageCacheRecycler.BYTE_PAGE_SIZE; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -138,7 +138,7 @@ public void testSingleFullPageBulkWrite() throws Exception { } public void testSingleFullPageBulkWriteWithOffset() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int initialOffset = 10; int additionalLength = PageCacheRecycler.BYTE_PAGE_SIZE; @@ -157,7 +157,7 @@ public void testSingleFullPageBulkWriteWithOffset() throws Exception { } public void testSingleFullPageBulkWriteWithOffsetCrossover() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int initialOffset = 10; int additionalLength = PageCacheRecycler.BYTE_PAGE_SIZE * 2; @@ -176,7 +176,7 @@ public void testSingleFullPageBulkWriteWithOffsetCrossover() throws Exception { } public void testSingleFullPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = PageCacheRecycler.BYTE_PAGE_SIZE; byte[] expectedData = 
randomizedByteArrayWithSize(expectedSize); @@ -193,7 +193,7 @@ public void testSingleFullPage() throws Exception { } public void testOneFullOneShortPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = PageCacheRecycler.BYTE_PAGE_SIZE + 10; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -210,7 +210,7 @@ public void testOneFullOneShortPage() throws Exception { } public void testTwoFullOneShortPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = (PageCacheRecycler.BYTE_PAGE_SIZE * 2) + 1; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -263,7 +263,7 @@ public void testSkip() throws Exception { public void testSimpleStreams() throws Exception { assumeTrue("requires a 64-bit JRE ... ?!", Constants.JRE_IS_64BIT); - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); out.writeBoolean(false); out.writeByte((byte) 1); out.writeShort((short) -1); @@ -351,7 +351,7 @@ public String toString() { } public void testNamedWriteable() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( Collections.singletonList( new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new) @@ -385,7 +385,7 @@ public void testNamedWriteableList() throws IOException { expected.add(new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); } - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { out.writeNamedWriteableCollection(expected); try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), 
namedWriteableRegistry)) { assertEquals(expected, in.readNamedWriteableCollectionAsList(BaseNamedWriteable.class)); @@ -395,7 +395,7 @@ public void testNamedWriteableList() throws IOException { } public void testNamedWriteableNotSupportedWithoutWrapping() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2"); out.writeNamedWriteable(testNamedWriteable); StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); @@ -405,7 +405,7 @@ public void testNamedWriteableNotSupportedWithoutWrapping() throws IOException { } public void testNamedWriteableReaderReturnsNull() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( Collections.singletonList( new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null) @@ -426,7 +426,7 @@ public void testNamedWriteableReaderReturnsNull() throws IOException { } public void testOptionalWriteableReaderReturnsNull() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { out.writeOptionalWriteable(new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); AssertionError e = expectThrows(AssertionError.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null)); @@ -435,7 +435,7 @@ public void testOptionalWriteableReaderReturnsNull() throws IOException { } public void testWriteableReaderReturnsWrongName() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { NamedWriteableRegistry 
namedWriteableRegistry = new NamedWriteableRegistry( Collections.singletonList( new NamedWriteableRegistry.Entry( @@ -501,7 +501,7 @@ public void testWriteMap() throws IOException { expected.put(randomAlphaOfLength(2), randomAlphaOfLength(5)); } - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMap(expected, StreamOutput::writeString, StreamOutput::writeString); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final Map loaded = in.readMap(StreamInput::readString, StreamInput::readString); @@ -518,7 +518,7 @@ public void testWriteImmutableMap() throws IOException { } final ImmutableOpenMap expected = expectedBuilder.build(); - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMap(expected, StreamOutput::writeString, StreamOutput::writeString); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final ImmutableOpenMap loaded = in.readImmutableOpenMap(StreamInput::readString, StreamInput::readString); @@ -534,7 +534,7 @@ public void testWriteImmutableMapOfWritable() throws IOException { } final ImmutableOpenMap expected = expectedBuilder.build(); - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMap(expected); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final ImmutableOpenMap loaded = in.readImmutableOpenMap(TestWriteable::new, TestWriteable::new); @@ -550,7 +550,7 @@ public void testWriteMapAsList() throws IOException { expected.put("key_" + value, value); } - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMapValues(expected, StreamOutput::writeString); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final Map loaded = in.readMapValues(StreamInput::readString, value -> 
"key_" + value); @@ -674,7 +674,7 @@ public void testWriteMapWithConsistentOrder() throws IOException { assertNotEquals(mapKeys, reverseMapKeys); - try (BytesStreamOutput output = new BytesStreamOutput(); BytesStreamOutput reverseMapOutput = new BytesStreamOutput()) { + try (TestStreamOutput output = new TestStreamOutput(); TestStreamOutput reverseMapOutput = new TestStreamOutput()) { output.writeMapWithConsistentOrder(map); reverseMapOutput.writeMapWithConsistentOrder(reverseMap); @@ -689,7 +689,7 @@ public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException { () -> randomAlphaOfLength(5), () -> randomAlphaOfLength(5) ); - try (BytesStreamOutput streamOut = new BytesStreamOutput()) { + try (TestStreamOutput streamOut = new TestStreamOutput()) { streamOut.writeMapWithConsistentOrder(streamOutMap); StreamInput in = StreamInput.wrap(BytesReference.toBytes(streamOut.bytes())); Map streamInMap = in.readMap(); @@ -698,7 +698,7 @@ public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException { } public void testWriteMapWithConsistentOrderWithLinkedHashMapShouldThrowAssertError() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput()) { + try (TestStreamOutput output = new TestStreamOutput()) { Map map = new LinkedHashMap<>(); Throwable e = expectThrows(AssertionError.class, () -> output.writeMapWithConsistentOrder(map)); assertEquals(AssertionError.class, e.getClass()); @@ -715,7 +715,7 @@ public void testWriteRandomStrings() throws IOException { for (int iter = 0; iter < iters; iter++) { List strings = new ArrayList<>(); int numStrings = randomIntBetween(100, 1000); - BytesStreamOutput output = new BytesStreamOutput(0); + TestStreamOutput output = new TestStreamOutput(); for (int i = 0; i < numStrings; i++) { String s = randomRealisticUnicodeOfLengthBetween(0, 2048); strings.add(s); @@ -739,7 +739,7 @@ public void testWriteLargeSurrogateOnlyString() throws IOException { assertEquals(2, deseretLetter.length()); 
String largeString = IntStream.range(0, 2048).mapToObj(s -> deseretLetter).collect(Collectors.joining("")).trim(); assertEquals("expands to 4 bytes", 4, new BytesRef(deseretLetter).length); - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeString(largeString); try (StreamInput streamInput = output.bytes().streamInput()) { assertEquals(largeString, streamInput.readString()); @@ -748,7 +748,7 @@ public void testWriteLargeSurrogateOnlyString() throws IOException { } public void testReadTooLargeArraySize() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeVInt(10); for (int i = 0; i < 10; i++) { output.writeInt(i); @@ -769,7 +769,7 @@ public void testReadTooLargeArraySize() throws IOException { } public void testReadCorruptedArraySize() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeVInt(10); for (int i = 0; i < 10; i++) { output.writeInt(i); @@ -791,7 +791,7 @@ public void testReadCorruptedArraySize() throws IOException { } public void testReadNegativeArraySize() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeVInt(10); for (int i = 0; i < 10; i++) { output.writeInt(i); @@ -814,10 +814,10 @@ public void testReadNegativeArraySize() throws IOException { public void testVInt() throws IOException { final int value = randomInt(); - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); output.writeVInt(value); - BytesStreamOutput simple = new BytesStreamOutput(); + TestStreamOutput simple = new TestStreamOutput(); int i = value; while ((i & ~0x7F) != 0) { simple.writeByte(((byte) ((i & 0x7f) | 0x80))); @@ -834,14 +834,14 @@ public 
void testVLong() throws IOException { final long value = randomLong(); { // Read works for positive and negative numbers - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); output.writeVLongNoCheck(value); // Use NoCheck variant so we can write negative numbers StreamInput input = output.bytes().streamInput(); assertEquals(value, input.readVLong()); } if (value < 0) { // Write doesn't work for negative numbers - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); Exception e = expectThrows(IllegalStateException.class, () -> output.writeVLong(value)); assertEquals("Negative longs unsupported, use writeLong or writeZLong for negative numbers [" + value + "]", e.getMessage()); } @@ -855,7 +855,7 @@ public enum TestEnum { public void testEnum() throws IOException { TestEnum value = randomFrom(TestEnum.values()); - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); output.writeEnum(value); StreamInput input = output.bytes().streamInput(); assertEquals(value, input.readEnum(TestEnum.class)); @@ -863,7 +863,7 @@ public void testEnum() throws IOException { } public void testInvalidEnum() throws IOException { - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); int randomNumber = randomInt(); boolean validEnum = randomNumber >= 0 && randomNumber < TestEnum.values().length; output.writeVInt(randomNumber); @@ -878,7 +878,7 @@ public void testInvalidEnum() throws IOException { } private static void assertEqualityAfterSerialize(TimeValue value, int expectedSize) throws IOException { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); out.writeTimeValue(value); assertEquals(expectedSize, out.size()); @@ -897,8 +897,111 @@ public void testTimeValueSerialize() throws Exception { 
assertEqualityAfterSerialize(TimeValue.timeValueSeconds(30), 2); final TimeValue timeValue = new TimeValue(randomIntBetween(0, 1024), randomFrom(TimeUnit.values())); - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); out.writeZLong(timeValue.duration()); assertEqualityAfterSerialize(timeValue, 1 + out.bytes().length()); } + + private static class TestStreamOutput extends BytesStream { + + private final BytesStreamOutput output = new BytesStreamOutput(); + private final CountingStreamOutput counting = new CountingStreamOutput(); + + @Override + public void writeByte(byte b) { + output.writeByte(b); + counting.writeByte(b); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeBytes(byte[] b, int offset, int length) { + output.writeBytes(b, offset, length); + counting.writeBytes(b, offset, length); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeInt(int i) throws IOException { + output.writeInt(i); + counting.writeInt(i); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeIntArray(int[] values) throws IOException { + output.writeIntArray(values); + counting.writeIntArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeLong(long i) throws IOException { + output.writeLong(i); + counting.writeLong(i); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeLongArray(long[] values) throws IOException { + output.writeLongArray(values); + counting.writeLongArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeFloat(float v) throws IOException { + output.writeFloat(v); + counting.writeFloat(v); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeFloatArray(float[] values) throws 
IOException { + output.writeFloatArray(values); + counting.writeFloatArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeDouble(double v) throws IOException { + output.writeDouble(v); + counting.writeDouble(v); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeDoubleArray(double[] values) throws IOException { + output.writeDoubleArray(values); + counting.writeDoubleArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public BytesReference bytes() { + BytesReference bytesReference = output.bytes(); + assertThat((long) bytesReference.length(), equalTo(counting.size())); + return bytesReference; + } + + public int size() { + int size = output.size(); + assertThat((long) size, equalTo(counting.size())); + return size; + } + + @Override + public void flush() { + output.flush(); + counting.flush(); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void close() { + assertThat((long) output.size(), equalTo(counting.size())); + output.close(); + counting.close(); + } + } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java index 9e1ebd8a75b43..a69f0f6d73365 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java @@ -11,7 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.CountingStreamOutput; import 
org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -124,31 +124,31 @@ private TriangleTreeNode(ShapeField.DecodedTriangle component) { } private void writeTo(StreamOutput out) throws IOException { - BytesStreamOutput scratchBuffer = new BytesStreamOutput(); + CountingStreamOutput countingBuffer = new CountingStreamOutput(); writeMetadata(out); writeComponent(out); if (left != null) { - left.writeNode(out, maxX, maxY, scratchBuffer); + left.writeNode(out, maxX, maxY, countingBuffer); } if (right != null) { - right.writeNode(out, maxX, maxY, scratchBuffer); + right.writeNode(out, maxX, maxY, countingBuffer); } } - private void writeNode(StreamOutput out, int parentMaxX, int parentMaxY, BytesStreamOutput scratchBuffer) throws IOException { + private void writeNode(StreamOutput out, int parentMaxX, int parentMaxY, CountingStreamOutput countingBuffer) throws IOException { out.writeVLong((long) parentMaxX - maxX); out.writeVLong((long) parentMaxY - maxY); - int size = nodeSize(false, parentMaxX, parentMaxY, scratchBuffer); - out.writeVInt(size); + long size = nodeSize(false, parentMaxX, parentMaxY, countingBuffer); + out.writeVInt(Math.toIntExact(size)); writeMetadata(out); writeComponent(out); if (left != null) { - left.writeNode(out, maxX, maxY, scratchBuffer); + left.writeNode(out, maxX, maxY, countingBuffer); } if (right != null) { - int rightSize = right.nodeSize(true, maxX, maxY, scratchBuffer); - out.writeVInt(rightSize); - right.writeNode(out, maxX, maxY, scratchBuffer); + long rightSize = right.nodeSize(true, maxX, maxY, countingBuffer); + out.writeVInt(Math.toIntExact(rightSize)); + right.writeNode(out, maxX, maxY, countingBuffer); } } @@ -184,50 +184,50 @@ private void writeComponent(StreamOutput out) throws IOException { out.writeVLong((long) maxY - component.cY); } - private int nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, BytesStreamOutput scratchBuffer) throws IOException { - int size = 0; + private long 
nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, CountingStreamOutput countingBuffer) throws IOException { + long size = 0; size++; // metadata - size += componentSize(scratchBuffer); + size += componentSize(countingBuffer); if (left != null) { - size += left.nodeSize(true, maxX, maxY, scratchBuffer); + size += left.nodeSize(true, maxX, maxY, countingBuffer); } if (right != null) { - int rightSize = right.nodeSize(true, maxX, maxY, scratchBuffer); - scratchBuffer.reset(); - scratchBuffer.writeVLong(rightSize); - size += scratchBuffer.size(); // jump size + long rightSize = right.nodeSize(true, maxX, maxY, countingBuffer); + countingBuffer.reset(); + countingBuffer.writeVLong(rightSize); + size += countingBuffer.size(); // jump size size += rightSize; } if (includeBox) { - int jumpSize = size; - scratchBuffer.reset(); - scratchBuffer.writeVLong((long) parentMaxX - maxX); - scratchBuffer.writeVLong((long) parentMaxY - maxY); - scratchBuffer.writeVLong(jumpSize); - size += scratchBuffer.size(); // box size + long jumpSize = size; + countingBuffer.reset(); + countingBuffer.writeVLong((long) parentMaxX - maxX); + countingBuffer.writeVLong((long) parentMaxY - maxY); + countingBuffer.writeVLong(jumpSize); + size += countingBuffer.size(); // box size } return size; } - private int componentSize(BytesStreamOutput scratchBuffer) throws IOException { - scratchBuffer.reset(); + private long componentSize(CountingStreamOutput countingBuffer) throws IOException { + countingBuffer.reset(); if (component.type == ShapeField.DecodedTriangle.TYPE.POINT) { - scratchBuffer.writeVLong((long) maxX - component.aX); - scratchBuffer.writeVLong((long) maxY - component.aY); + countingBuffer.writeVLong((long) maxX - component.aX); + countingBuffer.writeVLong((long) maxY - component.aY); } else if (component.type == ShapeField.DecodedTriangle.TYPE.LINE) { - scratchBuffer.writeVLong((long) maxX - component.aX); - scratchBuffer.writeVLong((long) maxY - component.aY); - 
scratchBuffer.writeVLong((long) maxX - component.bX); - scratchBuffer.writeVLong((long) maxY - component.bY); + countingBuffer.writeVLong((long) maxX - component.aX); + countingBuffer.writeVLong((long) maxY - component.aY); + countingBuffer.writeVLong((long) maxX - component.bX); + countingBuffer.writeVLong((long) maxY - component.bY); } else { - scratchBuffer.writeVLong((long) maxX - component.aX); - scratchBuffer.writeVLong((long) maxY - component.aY); - scratchBuffer.writeVLong((long) maxX - component.bX); - scratchBuffer.writeVLong((long) maxY - component.bY); - scratchBuffer.writeVLong((long) maxX - component.cX); - scratchBuffer.writeVLong((long) maxY - component.cY); + countingBuffer.writeVLong((long) maxX - component.aX); + countingBuffer.writeVLong((long) maxY - component.aY); + countingBuffer.writeVLong((long) maxX - component.bX); + countingBuffer.writeVLong((long) maxY - component.bY); + countingBuffer.writeVLong((long) maxX - component.cX); + countingBuffer.writeVLong((long) maxY - component.cY); } - return Math.toIntExact(scratchBuffer.size()); + return countingBuffer.size(); } } } From 4c2f23f30feb065f7bbd901f1774512def777038 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Mon, 4 Dec 2023 12:35:23 -0500 Subject: [PATCH 127/181] [ML] Correct ML exceptions to use RestStatus (#102781) * added exception status for JobResultsProvider * InferenceRunner - Throw statusException if a statusException is caught * TransportDeleteForecastAction - throw if caught status exception * ExpiredForecastsRemover - replaced failed search with status exception; added throw if status exception * ExpiredResultsRemover - throw if statusException * ExpiredModelSnapshotsRemover replaced exception with too many requests * ExpiredAnnotationsRemover too many requests * ProcessContext changed to statusException with too many requests for tryLock failure * ChunkedTrainedModelRestorer - Changed to status exception with too many 
requests * updating exceptions to have status codes in ml code for handling action failures * add status ioException in MachineLearning.java * changed exceptions caused by incomplete upgrades to include status RequestTimeout(408) * updated handling of exception collections * Added too_many_requests for ElasticsearchMappings * Added Request_Timeout status for failed datafeed job cleanup in TransportPutJobAction * Added RequstTimeout status for failed query parsing in DataFrameAnalyticsSource * Added InternalServerError status for negative pipeline count in GetTrainedMdoelStatsAction * removed assertion in ExceptionCollectionHandling --- .../dataframe/DataFrameAnalyticsSource.java | 9 ++- .../persistence/ElasticsearchMappings.java | 8 ++- ...rtCancelJobModelSnapshotUpgradeAction.java | 8 ++- .../ml/action/TransportCloseJobAction.java | 9 +-- .../action/TransportDeleteForecastAction.java | 12 +++- .../ml/action/TransportPutJobAction.java | 6 +- ...TransportStopDataFrameAnalyticsAction.java | 9 +-- .../action/TransportStopDatafeedAction.java | 9 +-- ...ransportUpgradeJobModelSnapshotAction.java | 6 +- .../CategorizeTextAggregationBuilder.java | 13 ++-- .../InternalCategorizationAggregation.java | 23 ++++--- .../datafeed/DatafeedConfigAutoUpdater.java | 19 +++++- .../dataframe/inference/InferenceRunner.java | 12 ++-- .../ChunkedTrainedModelRestorer.java | 6 +- .../job/persistence/JobResultsProvider.java | 24 +++++-- .../process/autodetect/ProcessContext.java | 5 +- .../retention/ExpiredAnnotationsRemover.java | 11 +++- .../retention/ExpiredForecastsRemover.java | 26 +++++++- .../ExpiredModelSnapshotsRemover.java | 12 +++- .../job/retention/ExpiredResultsRemover.java | 20 +++++- .../ml/utils/ExceptionCollectionHandling.java | 64 +++++++++++++++++++ .../persistence/ResultsPersisterService.java | 7 +- 22 files changed, 252 insertions(+), 66 deletions(-) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java index e6b263abd0f01..9c326f067caf7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.dataframe; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -14,6 +14,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -171,7 +172,11 @@ public QueryBuilder getParsedQuery() { if (exception instanceof RuntimeException runtimeException) { throw runtimeException; } else { - throw new ElasticsearchException(queryProvider.getParsingException()); + throw new ElasticsearchStatusException( + queryProvider.getParsingException().getMessage(), + RestStatus.BAD_REQUEST, + queryProvider.getParsingException() + ); } } return queryProvider.getParsedQuery(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 1dde9dc6075d0..4187762ca58c6 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; @@ -23,6 +23,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.XContentType; @@ -189,10 +190,11 @@ protected void doRun() throws Exception { listener.onResponse(true); } else { listener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "Attempt to put missing mapping in indices " + Arrays.toString(indicesThatRequireAnUpdate) - + " was not acknowledged" + + " was not acknowledged", + RestStatus.TOO_MANY_REQUESTS ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java index aa28135787b5d..2dcb9c5dfe705 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; 
+import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -37,6 +37,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; + public class TransportCancelJobModelSnapshotUpgradeAction extends HandledTransportAction { private static final Logger logger = LogManager.getLogger(TransportCancelJobModelSnapshotUpgradeAction.class); @@ -134,11 +136,11 @@ private void sendResponseOrFailure(ActionListener listener, AtomicArra + request.getJobId() + "]. Total failures [" + caughtExceptions.size() - + "], rethrowing first, all Exceptions: [" + + "], rethrowing first. All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index 53f6c19ce43f1..7b561ccaede2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -63,6 
+63,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; public class TransportCloseJobAction extends TransportTasksAction< JobTask, @@ -537,7 +538,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new CloseJobAction.Response(true)); return; } @@ -546,11 +547,11 @@ private static void sendResponseOrFailure( + jobId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java index 5aa85a6331c22..495d75b2de2cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java @@ -249,7 +249,17 @@ private static void handleFailure(Exception e, DeleteForecastAction.Request requ ); } } else { - listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchStatusException( + "An error occurred while searching forecasts to delete", + elasticsearchException.status(), + 
elasticsearchException + ) + ); + } else { + listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)); + } } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java index ebe766f6b5669..767ec08078b42 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -23,6 +23,7 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -120,8 +121,9 @@ protected void masterOperation( () -> format("[%s] failed to cleanup job after datafeed creation failure", request.getJobBuilder().getId()), deleteFailed ); - ElasticsearchException ex = new ElasticsearchException( + ElasticsearchStatusException ex = new ElasticsearchStatusException( "failed to cleanup job after datafeed creation failure", + RestStatus.REQUEST_TIMEOUT, failed ); ex.addSuppressed(deleteFailed); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java index c3d35fbc11593..42d36006acbde 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -58,6 +57,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; + /** * Stops the persistent task for running data frame analytics. */ @@ -297,7 +298,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new StopDataFrameAnalyticsAction.Response(true)); return; } @@ -306,11 +307,11 @@ private static void sendResponseOrFailure( + analyticsId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. 
All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index 2c9668a504b55..41359f5fcc166 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -58,6 +58,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; public class TransportStopDatafeedAction extends TransportTasksAction< TransportStartDatafeedAction.DatafeedTask, @@ -462,7 +463,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new StopDatafeedAction.Response(true)); return; } @@ -471,11 +472,11 @@ private static void sendResponseOrFailure( + datafeedId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. 
All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java index 6335e0b78bd83..3f6193c124a9a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -28,6 +28,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -270,8 +271,9 @@ public void onFailure(Exception e) { @Override public void onTimeout(TimeValue timeout) { listener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "snapshot upgrader request [{}] [{}] timed out after [{}]", + RestStatus.REQUEST_TIMEOUT, params.getJobId(), params.getSnapshotId(), timeout diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java index 8df56d9df9c2f..6fce8aa20ed16 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java @@ -7,11 +7,12 @@ package org.elasticsearch.xpack.ml.aggs.categorization; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -123,12 +124,13 @@ public CategorizeTextAggregationBuilder(StreamInput in) throws IOException { super(in); // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (in.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } this.bucketCountThresholds = new TermsAggregator.BucketCountThresholds(in); @@ -279,12 +281,13 @@ protected CategorizeTextAggregationBuilder( protected void doWriteTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (out.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } bucketCountThresholds.writeTo(out); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 0ed673ac5a365..9b337d559854a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -8,10 +8,11 @@ package org.elasticsearch.xpack.ml.aggs.categorization; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -109,12 +110,13 @@ public Bucket(SerializableTokenListCategory serializableCategory, long bucketOrd public Bucket(StreamInput in) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (in.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } serializableCategory = new SerializableTokenListCategory(in); @@ -127,12 +129,13 @@ public Bucket(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (out.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } serializableCategory.writeTo(out); @@ -239,12 +242,13 @@ public InternalCategorizationAggregation(StreamInput in) throws IOException { super(in); // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (in.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } this.similarityThreshold = in.readVInt(); @@ -257,12 +261,13 @@ public InternalCategorizationAggregation(StreamInput in) throws IOException { protected void doWriteTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (out.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } out.writeVInt(similarityThreshold); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java index 330327dc31a46..e61ffba9b3164 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
+ * + * This file has been contributed to be a Generative AI */ package org.elasticsearch.xpack.ml.datafeed; @@ -10,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.IndicesOptions; @@ -17,6 +20,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; @@ -109,7 +113,20 @@ public void runUpdate() { logger.debug(() -> "[" + update.getId() + "] datafeed successfully updated"); } catch (Exception ex) { logger.warn(() -> "[" + update.getId() + "] failed being updated", ex); - failures.add(new ElasticsearchException("Failed to update datafeed {}", ex, update.getId())); + if (ex instanceof ElasticsearchException elasticsearchException) { + failures.add( + new ElasticsearchStatusException( + "Failed to update datafeed {}", + elasticsearchException.status(), + elasticsearchException, + update.getId() + ) + ); + } else { + failures.add( + new ElasticsearchStatusException("Failed to update datafeed {}", RestStatus.REQUEST_TIMEOUT, ex, update.getId()) + ); + } } } if (failures.isEmpty()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index 168b0deda87d4..cc59903436e2f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; @@ -115,15 +116,14 @@ public void run(String modelId) { } } catch (Exception e) { LOGGER.error(() -> format("[%s] Error running inference on model [%s]", config.getId(), modelId), e); - - if (e instanceof ElasticsearchException) { - Throwable rootCause = ((ElasticsearchException) e).getRootCause(); - throw new ElasticsearchException( + if (e instanceof ElasticsearchException elasticsearchException) { + throw new ElasticsearchStatusException( "[{}] failed running inference on model [{}]; cause was [{}]", - rootCause, + elasticsearchException.status(), + elasticsearchException.getRootCause(), config.getId(), modelId, - rootCause.getMessage() + elasticsearchException.getRootCause().getMessage() ); } throw ExceptionsHelper.serverError( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index 015b88552a1d0..3ace40e0deb6b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import 
org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; @@ -24,6 +24,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -238,13 +239,14 @@ static SearchResponse retryingSearch(Client client, String modelId, SearchReques * This intentionally prevents that code from attempting to retry loading the entire model. If the retry logic here * fails after the set retries we should not retry loading the entire model to avoid additional strain on the cluster. */ - throw new ElasticsearchException( + throw new ElasticsearchStatusException( format( "loading model [%s] failed after [%s] retries. The deployment is now in a failed state, " + "the error may be transient please stop the deployment and restart", modelId, retries ), + RestStatus.TOO_MANY_REQUESTS, e ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 5f796242e5bf8..d309ee2e5dc95 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -635,13 +635,19 @@ public void datafeedTimingStats( int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards(); if (CollectionUtils.isEmpty(shardFailures) == false) { LOGGER.error("[{}] Search request returned shard failures: {}", jobId, Arrays.toString(shardFailures)); - listener.onFailure(new ElasticsearchException(ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures))); + 
listener.onFailure( + new ElasticsearchStatusException( + ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures), + RestStatus.TOO_MANY_REQUESTS + ) + ); return; } if (unavailableShards > 0) { listener.onFailure( - new ElasticsearchException( - "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards" + new ElasticsearchStatusException( + "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards", + RestStatus.TOO_MANY_REQUESTS ) ); return; @@ -739,13 +745,19 @@ public void getAutodetectParams(Job job, String snapshotId, Consumer 0) { errorHandler.accept( - new ElasticsearchException( - "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards" + new ElasticsearchStatusException( + "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards", + RestStatus.TOO_MANY_REQUESTS ) ); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java index 49b02bdd6ae74..f124deecd9914 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java @@ -8,7 +8,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.task.JobTask; @@ -61,7 +62,7 @@ void tryLock() { throw ExceptionsHelper.serverError("Failed to acquire process lock for job [" + jobTask.getJobId() + "]"); } } catch 
(InterruptedException e) { - throw new ElasticsearchException(e); + throw new ElasticsearchStatusException(e.getMessage(), RestStatus.TOO_MANY_REQUESTS, e); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java index 427b7c9defa5a..917d5881ae130 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.client.internal.OriginSettingClient; @@ -19,6 +19,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.annotations.Annotation; @@ -100,7 +101,13 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired annotations for job [" + job.getId() + "]", e)); + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired annotations for job [" + job.getId() + "]", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index ed4e6875e260a..424668a20bf05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -22,6 +23,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -75,7 +77,15 @@ public void remove(float requestsPerSec, ActionListener listener, Boole LOGGER.debug("Removing forecasts that expire before [{}]", cutoffEpochMs); ActionListener forecastStatsHandler = ActionListener.wrap( searchResponse -> deleteForecasts(searchResponse, requestsPerSec, listener, isTimedOutSupplier), - e -> listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)) + e -> { + listener.onFailure( + new ElasticsearchStatusException( + "An error occurred while searching forecasts to delete", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); + } ); SearchSourceBuilder source = new SearchSourceBuilder(); @@ -143,7 +153,19 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired 
forecasts", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchException( + "Failed to remove expired forecasts", + elasticsearchException.status(), + elasticsearchException + ) + ); + } else { + listener.onFailure( + new ElasticsearchStatusException("Failed to remove expired forecasts", RestStatus.TOO_MANY_REQUESTS, e) + ); + } } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java index 1854e3b752de3..507e9dac6282d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ThreadedActionListener; @@ -16,6 +16,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -230,7 +231,14 @@ public void onResponse(QueryPage searchResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("[{}] Search for expired snapshots failed", e, job.getId())); + listener.onFailure( + new ElasticsearchStatusException( + "[{}] Search for expired snapshots failed", + RestStatus.TOO_MANY_REQUESTS, + 
e, + job.getId() + ) + ); } }; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java index 537297d130789..db712def11eac 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ThreadedActionListener; @@ -22,6 +23,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -115,7 +117,23 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired results for job [" + job.getId() + "]", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired results for job [" + job.getId() + "]", + elasticsearchException.status(), + elasticsearchException + ) + ); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired results for job [" + job.getId() + "]", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); + } } }); } diff 
--git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java new file mode 100644 index 0000000000000..d60194918274e --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * This file was contributed to by generative AI + */ + +package org.elasticsearch.xpack.ml.utils; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.rest.RestStatus; + +import java.util.List; + +public class ExceptionCollectionHandling { + + /** + * Given an array of exceptions, return a single ElasticsearchStatusException. + * Return the first exception if all exceptions have 4XX status. + * Otherwise, return a generic 500 error. 
+ * + * @param failures must not be empty or null + * @param message the message to use for the ElasticsearchStatusException + */ + public static ElasticsearchStatusException exceptionArrayToStatusException(AtomicArray failures, String message) { + + List caughtExceptions = failures.asList(); + if (caughtExceptions.isEmpty()) { + assert false : "method to combine exceptions called with no exceptions"; + return new ElasticsearchStatusException("No exceptions caught", RestStatus.INTERNAL_SERVER_ERROR); + } else { + + boolean allElasticsearchException = true; + boolean allStatus4xx = true; + + for (Exception exception : caughtExceptions) { + if (exception instanceof ElasticsearchException elasticsearchException) { + if (elasticsearchException.status().getStatus() < 400 || elasticsearchException.status().getStatus() >= 500) { + allStatus4xx = false; + } + } else { + allElasticsearchException = false; + break; + } + } + + if (allElasticsearchException && allStatus4xx) { + return new ElasticsearchStatusException( + message, + ((ElasticsearchException) caughtExceptions.get(0)).status(), + caughtExceptions.get(0) + ); + } else { + return new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); + } + + } + + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java index 5fa434e530bc5..e87fbf48ca421 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.ActionListener; @@ -192,8 +191,9 @@ public void bulkIndexWithRetry( ) { if (isShutdown || isResetMode) { finalListener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "Bulk indexing has failed as {}", + RestStatus.TOO_MANY_REQUESTS, isShutdown ? "node is shutting down." : "machine learning feature is being reset." ) ); @@ -233,8 +233,9 @@ private BulkResponse bulkIndexWithRetry( BiConsumer> actionExecutor ) { if (isShutdown || isResetMode) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "Bulk indexing has failed as {}", + RestStatus.TOO_MANY_REQUESTS, isShutdown ? "node is shutting down." : "machine learning feature is being reset." ); } From c54ce6872386dbeea2384398f036cf0914ba7937 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 4 Dec 2023 18:54:38 +0100 Subject: [PATCH 128/181] Switch InstallPluginActionTests to non-blocking SecureRandom seed generator (#102893) * Force urandom on Linux * Unmute test --- distribution/tools/plugin-cli/build.gradle | 8 ++++++++ .../plugins/cli/InstallPluginActionTests.java | 1 - 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index e0d1dd983c0de..3859dfa1ddbb9 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ +import org.elasticsearch.gradle.OS + apply plugin: 'elasticsearch.build' base { @@ -38,6 +40,12 @@ tasks.named("dependencyLicenses").configure { tasks.named("test").configure { // TODO: find a way to add permissions for the tests in this module systemProperty 'tests.security.manager', 'false' + // These tests are "heavy" on the secure number generator. 
On Linux, the NativePRNG defaults to /dev/random for the seeds, and + // its entropy is quite limited, to the point that it's known to hang: https://bugs.openjdk.org/browse/JDK-6521844 + // We force the seed to be initialized from /dev/urandom, which is less secure, but in case of unit tests is not important. + if (OS.current() == OS.LINUX) { + systemProperty 'java.security.egd', 'file:/dev/urandom' + } } /* diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index f7882a3fce743..c088e89338e74 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -118,7 +118,6 @@ import static org.mockito.Mockito.spy; @LuceneTestCase.SuppressFileSystems("*") -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102783") public class InstallPluginActionTests extends ESTestCase { private InstallPluginAction skipJarHellAction; From 67ab4b46ea380d565f7ae2bec0a3d95587613a42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Mon, 4 Dec 2023 19:02:21 +0100 Subject: [PATCH 129/181] [Transform] Ensure transform updates only modify the expected transform task (#102934) --- docs/changelog/102934.yaml | 6 ++++++ .../core/transform/action/UpdateTransformAction.java | 10 ++++++++++ .../action/UpdateTransformActionRequestTests.java | 9 +++++++++ 3 files changed, 25 insertions(+) create mode 100644 docs/changelog/102934.yaml diff --git a/docs/changelog/102934.yaml b/docs/changelog/102934.yaml new file mode 100644 index 0000000000000..4f61427506cf3 --- /dev/null +++ b/docs/changelog/102934.yaml @@ -0,0 +1,6 @@ +pr: 102934 +summary: Ensure transform updates only modify the expected transform task +area: 
Transform +type: bug +issues: + - 102933 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java index f9da4082dbfa2..b2a764b0be5b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -186,6 +187,15 @@ public boolean equals(Object obj) { && Objects.equals(authState, other.authState) && getTimeout().equals(other.getTimeout()); } + + @Override + public boolean match(Task task) { + if (task.getDescription().startsWith(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX)) { + String taskId = task.getDescription().substring(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX.length()); + return taskId.equals(this.id); + } + return false; + } } public static class Response extends BaseTasksResponse implements ToXContentObject { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java index bcfe2b1728cbf..9c90a10e204f0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java @@ -9,6 +9,7 @@ 
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction.Request; import org.elasticsearch.xpack.core.transform.transforms.AuthorizationStateTests; import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; @@ -74,4 +75,12 @@ protected Request mutateInstance(Request instance) { return new Request(update, id, deferValidation, timeout); } + + public void testMatch() { + Request request = new Request(randomTransformConfigUpdate(), "my-transform-7", false, null); + assertTrue(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-7", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-77", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "my-transform-7", null, null))); + } } From 5b0aec537e54339c6d90fa367a5fc2ce7cce3ecc Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Dec 2023 19:09:30 +0100 Subject: [PATCH 130/181] Fix remaining leaked SearchResponse issues in :server:test (#102897) Same as #102896, handling almost all of the remaining spots (just a handful of tricky ones left that I'll open a separate PR for). 
--- .../search/KnnSearchSingleNodeTests.java | 218 ++++----- .../action/search/SearchAsyncActionTests.java | 146 ++++--- .../search/SearchResponseMergerTests.java | 9 +- .../action/search/SearchResponseTests.java | 412 ++++++++++-------- .../TransportMultiSearchActionTests.java | 32 +- .../search/TransportSearchActionTests.java | 108 +++-- .../FieldStatsProviderRefreshTests.java | 45 +- .../flattened/FlattenedFieldSearchTests.java | 217 ++++----- .../search/SearchServiceTests.java | 56 ++- .../search/geo/GeoPointShapeQueryTests.java | 5 +- .../snapshots/SnapshotResiliencyTests.java | 16 +- .../ESBlobStoreRepositoryIntegTestCase.java | 15 +- .../geo/BasePointShapeQueryTestCase.java | 3 +- .../search/geo/BaseShapeIntegTestCase.java | 2 +- 14 files changed, 719 insertions(+), 565 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java index e6abe2f041a4c..a678956b20e59 100644 --- a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java @@ -26,6 +26,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -62,15 +63,17 @@ public void testKnnSearchRemovedVector() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 20, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .setSize(10) - .get(); - - // Originally indexed 20 documents, but deleted vector field with an update, so only 
19 should be hit - assertHitCount(response, 19); - assertEquals(10, response.getHits().getHits().length); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .setSize(10), + response -> { + // Originally indexed 20 documents, but deleted vector field with an update, so only 19 should be hit + assertHitCount(response, 19); + assertEquals(10, response.getHits().getHits().length); + } + ); // Make sure we still have 20 docs assertHitCount(client().prepareSearch("index").setSize(0).setTrackTotalHits(true), 20); } @@ -104,19 +107,22 @@ public void testKnnWithQuery() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .addFetchField("*") - .setSize(10) - .get(); - - // The total hits is k plus the number of text matches - assertHitCount(response, 15); - assertEquals(10, response.getHits().getHits().length); - - // Because of the boost, vector results should appear first - assertNotNull(response.getHits().getAt(0).field("vector")); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .addFetchField("*") + .setSize(10), + response -> { + + // The total hits is k plus the number of text matches + assertHitCount(response, 15); + assertEquals(10, response.getHits().getHits().length); + + // Because of the boost, vector results should appear first + assertNotNull(response.getHits().getAt(0).field("vector")); + } + ); } public void testKnnFilter() throws IOException { @@ -150,13 +156,13 @@ public void testKnnFilter() throws IOException { KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, 
null).addFilterQuery( QueryBuilders.termsQuery("field", "second") ); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10).get(); - - assertHitCount(response, 5); - assertEquals(5, response.getHits().getHits().length); - for (SearchHit hit : response.getHits().getHits()) { - assertEquals("second", hit.field("field").getValue()); - } + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10), response -> { + assertHitCount(response, 5); + assertEquals(5, response.getHits().getHits().length); + for (SearchHit hit : response.getHits().getHits()) { + assertEquals("second", hit.field("field").getValue()); + } + }); } public void testKnnFilterWithRewrite() throws IOException { @@ -193,10 +199,10 @@ public void testKnnFilterWithRewrite() throws IOException { KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).addFilterQuery( QueryBuilders.termsLookupQuery("field", new TermsLookup("index", "lookup-doc", "other-field")) ); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).setSize(10).get(); - - assertHitCount(response, 5); - assertEquals(5, response.getHits().getHits().length); + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).setSize(10), response -> { + assertHitCount(response, 5); + assertEquals(5, response.getHits().getHits().length); + }); } public void testMultiKnnClauses() throws IOException { @@ -239,26 +245,29 @@ public void testMultiKnnClauses() throws IOException { float[] queryVector = randomVector(20f, 21f); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).boost(5.0f); KnnSearchBuilder knnSearch2 = new KnnSearchBuilder("vector_2", queryVector, 5, 50, null).boost(10.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch, knnSearch2)) - 
.setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - - // The total hits is k plus the number of text matches - assertHitCount(response, 20); - assertEquals(10, response.getHits().getHits().length); - InternalStats agg = response.getAggregations().get("stats"); - assertThat(agg.getCount(), equalTo(20L)); - assertThat(agg.getMax(), equalTo(3.0)); - assertThat(agg.getMin(), equalTo(1.0)); - assertThat(agg.getAvg(), equalTo(2.25)); - assertThat(agg.getSum(), equalTo(45.0)); - - // Because of the boost & vector distributions, vector_2 results should appear first - assertNotNull(response.getHits().getAt(0).field("vector_2")); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch, knnSearch2)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + response -> { + + // The total hits is k plus the number of text matches + assertHitCount(response, 20); + assertEquals(10, response.getHits().getHits().length); + InternalStats agg = response.getAggregations().get("stats"); + assertThat(agg.getCount(), equalTo(20L)); + assertThat(agg.getMax(), equalTo(3.0)); + assertThat(agg.getMin(), equalTo(1.0)); + assertThat(agg.getAvg(), equalTo(2.25)); + assertThat(agg.getSum(), equalTo(45.0)); + + // Because of the boost & vector distributions, vector_2 results should appear first + assertNotNull(response.getHits().getAt(0).field("vector_2")); + } + ); } public void testMultiKnnClausesSameDoc() throws IOException { @@ -298,38 +307,42 @@ public void testMultiKnnClausesSameDoc() throws IOException { // Having the same query vector and same docs should mean our KNN scores are linearly combined if the same doc is matched KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null); KnnSearchBuilder 
knnSearch2 = new KnnSearchBuilder("vector_2", queryVector, 5, 50, null); - SearchResponse responseOneKnn = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - SearchResponse responseBothKnn = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch, knnSearch2)) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - - // The total hits is k matched docs - assertHitCount(responseOneKnn, 5); - assertHitCount(responseBothKnn, 5); - assertEquals(5, responseOneKnn.getHits().getHits().length); - assertEquals(5, responseBothKnn.getHits().getHits().length); - - for (int i = 0; i < responseOneKnn.getHits().getHits().length; i++) { - SearchHit oneHit = responseOneKnn.getHits().getHits()[i]; - SearchHit bothHit = responseBothKnn.getHits().getHits()[i]; - assertThat(bothHit.getId(), equalTo(oneHit.getId())); - assertThat(bothHit.getScore(), greaterThan(oneHit.getScore())); - } - InternalStats oneAgg = responseOneKnn.getAggregations().get("stats"); - InternalStats bothAgg = responseBothKnn.getAggregations().get("stats"); - assertThat(bothAgg.getCount(), equalTo(oneAgg.getCount())); - assertThat(bothAgg.getAvg(), equalTo(oneAgg.getAvg())); - assertThat(bothAgg.getMax(), equalTo(oneAgg.getMax())); - assertThat(bothAgg.getSum(), equalTo(oneAgg.getSum())); - assertThat(bothAgg.getMin(), equalTo(oneAgg.getMin())); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + responseOneKnn -> assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch, knnSearch2)) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + responseBothKnn -> { + + // The total hits is k 
matched docs + assertHitCount(responseOneKnn, 5); + assertHitCount(responseBothKnn, 5); + assertEquals(5, responseOneKnn.getHits().getHits().length); + assertEquals(5, responseBothKnn.getHits().getHits().length); + + for (int i = 0; i < responseOneKnn.getHits().getHits().length; i++) { + SearchHit oneHit = responseOneKnn.getHits().getHits()[i]; + SearchHit bothHit = responseBothKnn.getHits().getHits()[i]; + assertThat(bothHit.getId(), equalTo(oneHit.getId())); + assertThat(bothHit.getScore(), greaterThan(oneHit.getScore())); + } + InternalStats oneAgg = responseOneKnn.getAggregations().get("stats"); + InternalStats bothAgg = responseBothKnn.getAggregations().get("stats"); + assertThat(bothAgg.getCount(), equalTo(oneAgg.getCount())); + assertThat(bothAgg.getAvg(), equalTo(oneAgg.getAvg())); + assertThat(bothAgg.getMax(), equalTo(oneAgg.getMax())); + assertThat(bothAgg.getSum(), equalTo(oneAgg.getSum())); + assertThat(bothAgg.getMin(), equalTo(oneAgg.getMin())); + } + ) + ); } public void testKnnFilteredAlias() throws IOException { @@ -366,10 +379,11 @@ public void testKnnFilteredAlias() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 10, 50, null); - SearchResponse response = client().prepareSearch("test-alias").setKnnSearch(List.of(knnSearch)).setSize(10).get(); - - assertHitCount(response, expectedHits); - assertEquals(expectedHits, response.getHits().getHits().length); + final int expectedHitCount = expectedHits; + assertResponse(client().prepareSearch("test-alias").setKnnSearch(List.of(knnSearch)).setSize(10), response -> { + assertHitCount(response, expectedHitCount); + assertEquals(expectedHitCount, response.getHits().getHits().length); + }); } public void testKnnSearchAction() throws IOException { @@ -399,14 +413,14 @@ public void testKnnSearchAction() throws IOException { // Since there's no kNN search action at the transport layer, we just emulate // how the action works (it 
builds a kNN query under the hood) float[] queryVector = randomVector(); - SearchResponse response = client().prepareSearch("index1", "index2") - .setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5, null)) - .setSize(2) - .get(); - - // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard - assertHitCount(response, 5 * 2); - assertEquals(2, response.getHits().getHits().length); + assertResponse( + client().prepareSearch("index1", "index2").setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5, null)).setSize(2), + response -> { + // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard + assertHitCount(response, 5 * 2); + assertEquals(2, response.getHits().getHits().length); + } + ); } public void testKnnVectorsWith4096Dims() throws IOException { @@ -434,11 +448,11 @@ public void testKnnVectorsWith4096Dims() throws IOException { float[] queryVector = randomVector(4096); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 3, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10).get(); - - assertHitCount(response, 3); - assertEquals(3, response.getHits().getHits().length); - assertEquals(4096, response.getHits().getAt(0).field("vector").getValues().size()); + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10), response -> { + assertHitCount(response, 3); + assertEquals(3, response.getHits().getHits().length); + assertEquals(4096, response.getHits().getAt(0).field("vector").getValues().size()); + }); } private float[] randomVector() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 430e66c116744..a02eddf039e46 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -313,6 +313,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI final CountDownLatch latch = new CountDownLatch(1); final AtomicBoolean latchTriggered = new AtomicBoolean(); var results = new ArraySearchPhaseResults(shardsIter.size()); + final TestSearchResponse testResponse = new TestSearchResponse(); try { AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( "test", @@ -335,7 +336,6 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY ) { - final TestSearchResponse response = new TestSearchResponse(); @Override protected void executePhaseOnShard( @@ -343,7 +343,7 @@ protected void executePhaseOnShard( SearchShardTarget shard, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); + assertTrue("shard: " + shard.getShardId() + " has been queried twice", testResponse.queried.add(shard.getShardId())); Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), @@ -368,7 +368,7 @@ public void run() { assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); sendReleaseSearchContext(result.getContextId(), new MockConnection(result.node), OriginalIndices.NONE); } - responseListener.onResponse(response); + responseListener.onResponse(testResponse); if (latchTriggered.compareAndSet(false, true) == false) { throw new AssertionError("latch triggered twice"); } @@ -391,6 +391,7 @@ public void run() { final List runnables = executor.shutdownNow(); 
assertThat(runnables, equalTo(Collections.emptyList())); } finally { + testResponse.decRef(); results.decRef(); } } @@ -437,79 +438,82 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", AliasFilter.EMPTY); ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); - AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( - "test", - logger, - transportService, - (cluster, node) -> { - assert cluster == null : "cluster was not null: " + cluster; - return lookup.get(node); - }, - aliasFilters, - Collections.emptyMap(), - executor, - request, - responseListener, - shardsIter, - new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0), - ClusterState.EMPTY_STATE, - null, - new ArraySearchPhaseResults<>(shardsIter.size()), - request.getMaxConcurrentShardRequests(), - SearchResponse.Clusters.EMPTY - ) { - final TestSearchResponse response = new TestSearchResponse(); - - @Override - protected void executePhaseOnShard( - SearchShardIterator shardIt, - SearchShardTarget shard, - SearchActionListener listener + final TestSearchResponse response = new TestSearchResponse(); + try { + AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( + "test", + logger, + transportService, + (cluster, node) -> { + assert cluster == null : "cluster was not null: " + cluster; + return lookup.get(node); + }, + aliasFilters, + Collections.emptyMap(), + executor, + request, + responseListener, + shardsIter, + new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0), + ClusterState.EMPTY_STATE, + null, + new ArraySearchPhaseResults<>(shardsIter.size()), + request.getMaxConcurrentShardRequests(), + SearchResponse.Clusters.EMPTY ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); 
- Transport.Connection connection = getConnection(null, shard.getNodeId()); - final TestSearchPhaseResult testSearchPhaseResult; - if (shard.getShardId().id() == 0) { - testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); - } else { - testSearchPhaseResult = new TestSearchPhaseResult( - new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), - connection.getNode() - ); - Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); - ids.add(testSearchPhaseResult.getContextId()); - } - if (randomBoolean()) { - listener.onResponse(testSearchPhaseResult); - } else { - new Thread(() -> listener.onResponse(testSearchPhaseResult)).start(); + @Override + protected void executePhaseOnShard( + SearchShardIterator shardIt, + SearchShardTarget shard, + SearchActionListener listener + ) { + assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); + Transport.Connection connection = getConnection(null, shard.getNodeId()); + final TestSearchPhaseResult testSearchPhaseResult; + if (shard.getShardId().id() == 0) { + testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); + } else { + testSearchPhaseResult = new TestSearchPhaseResult( + new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), + connection.getNode() + ); + Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); + ids.add(testSearchPhaseResult.getContextId()); + } + if (randomBoolean()) { + listener.onResponse(testSearchPhaseResult); + } else { + new Thread(() -> listener.onResponse(testSearchPhaseResult)).start(); + } } - } - @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new SearchPhase("test") { - @Override - public void run() { - throw new RuntimeException("boom"); - } - }; + @Override + protected SearchPhase 
getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + return new SearchPhase("test") { + @Override + public void run() { + throw new RuntimeException("boom"); + } + }; + } + }; + asyncAction.start(); + latch.await(); + assertNotNull(failure.get()); + assertThat(failure.get().getCause().getMessage(), containsString("boom")); + assertFalse(nodeToContextMap.isEmpty()); + assertTrue(nodeToContextMap.toString(), nodeToContextMap.containsKey(primaryNode) || nodeToContextMap.containsKey(replicaNode)); + assertEquals(shardsIter.size() - 1, numFreedContext.get()); + if (nodeToContextMap.containsKey(primaryNode)) { + assertTrue(nodeToContextMap.get(primaryNode).toString(), nodeToContextMap.get(primaryNode).isEmpty()); + } else { + assertTrue(nodeToContextMap.get(replicaNode).toString(), nodeToContextMap.get(replicaNode).isEmpty()); } - }; - asyncAction.start(); - latch.await(); - assertNotNull(failure.get()); - assertThat(failure.get().getCause().getMessage(), containsString("boom")); - assertFalse(nodeToContextMap.isEmpty()); - assertTrue(nodeToContextMap.toString(), nodeToContextMap.containsKey(primaryNode) || nodeToContextMap.containsKey(replicaNode)); - assertEquals(shardsIter.size() - 1, numFreedContext.get()); - if (nodeToContextMap.containsKey(primaryNode)) { - assertTrue(nodeToContextMap.get(primaryNode).toString(), nodeToContextMap.get(primaryNode).isEmpty()); - } else { - assertTrue(nodeToContextMap.get(replicaNode).toString(), nodeToContextMap.get(replicaNode).isEmpty()); + final List runnables = executor.shutdownNow(); + assertThat(runnables, equalTo(Collections.emptyList())); + } finally { + response.decRef(); } - final List runnables = executor.shutdownNow(); - assertThat(runnables, equalTo(Collections.emptyList())); } public void testAllowPartialResults() throws InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java 
b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index e250964c50687..dc6e69b15ee32 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -77,7 +77,14 @@ public void init() { private void addResponse(SearchResponseMerger searchResponseMerger, SearchResponse searchResponse) { if (randomBoolean()) { - executorService.submit(() -> searchResponseMerger.add(searchResponse)); + searchResponse.incRef(); + executorService.submit(() -> { + try { + searchResponseMerger.add(searchResponse); + } finally { + searchResponse.decRef(); + } + }); } else { searchResponseMerger.add(searchResponse); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index b02dea53bc8b9..b45a04922c187 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -271,7 +271,12 @@ static SearchResponse.Clusters createCCSClusterObject( * compare xContent, so we omit it here */ public void testFromXContent() throws IOException { - doFromXContentTestWithRandomFields(createTestItem(), false); + var response = createTestItem(); + try { + doFromXContentTestWithRandomFields(response, false); + } finally { + response.decRef(); + } } /** @@ -281,7 +286,12 @@ public void testFromXContent() throws IOException { * fields to SearchHits, Aggregations etc... 
is tested in their own tests */ public void testFromXContentWithRandomFields() throws IOException { - doFromXContentTestWithRandomFields(createMinimalTestItem(), true); + var response = createMinimalTestItem(); + try { + doFromXContentTestWithRandomFields(response, true); + } finally { + response.decRef(); + } } private void doFromXContentTestWithRandomFields(SearchResponse response, boolean addRandomFields) throws IOException { @@ -328,15 +338,15 @@ public void testFromXContentWithFailures() throws IOException { for (int i = 0; i < failures.length; i++) { failures[i] = ShardSearchFailureTests.createTestItem(IndexMetadata.INDEX_UUID_NA_VALUE); } + BytesReference originalBytes; SearchResponse response = createTestItem(failures); XContentType xcontentType = randomFrom(XContentType.values()); - final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); - BytesReference originalBytes = toShuffledXContent( - ChunkedToXContent.wrapAsToXContent(response), - xcontentType, - params, - randomBoolean() - ); + try { + final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); + originalBytes = toShuffledXContent(ChunkedToXContent.wrapAsToXContent(response), xcontentType, params, randomBoolean()); + } finally { + response.decRef(); + } try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) { SearchResponse parsed = SearchResponse.fromXContent(parser); try { @@ -388,26 +398,30 @@ public void testToXContent() throws IOException { ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY ); - String expectedString = XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 0, - "successful": 0, - "skipped": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ { "_id": "id1", "_score": 2.0 } ] - } - }"""); - 
assertEquals(expectedString, Strings.toString(response)); + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 0, + "successful": 0, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 100, + "relation": "eq" + }, + "max_score": 1.5, + "hits": [ { "_id": "id1", "_score": 2.0 } ] + } + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } { SearchResponse response = new SearchResponse( @@ -428,34 +442,38 @@ public void testToXContent() throws IOException { ShardSearchFailure.EMPTY_ARRAY, new SearchResponse.Clusters(5, 3, 2) ); - String expectedString = XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 0, - "successful": 0, - "skipped": 0, - "failed": 0 - }, - "_clusters": { - "total": 5, - "successful": 3, - "skipped": 2, - "running":0, - "partial": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ { "_id": "id1", "_score": 2.0 } ] - } - }"""); - assertEquals(expectedString, Strings.toString(response)); + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 0, + "successful": 0, + "skipped": 0, + "failed": 0 + }, + "_clusters": { + "total": 5, + "successful": 3, + "skipped": 2, + "running":0, + "partial": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 100, + "relation": "eq" + }, + "max_score": 1.5, + "hits": [ { "_id": "id1", "_score": 2.0 } ] + } + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } { SearchResponse response = new SearchResponse( @@ -485,142 +503,154 @@ public void testToXContent() throws IOException { new ShardSearchFailure[] { new ShardSearchFailure(new IllegalStateException("corrupt index")) } ) ); - String expectedString = 
XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 20, - "successful": 9, - "skipped": 2, - "failed": 0 - }, - "_clusters": { - "total": 4, - "successful": 1, - "skipped": 1, - "running":0, - "partial": 1, - "failed": 1, - "details": { - "(local)": { - "status": "successful", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 5, - "skipped": 1, - "failed": 0 - } - }, - "cluster_1": { - "status": "skipped", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 0, - "skipped": 0, - "failed": 5 - }, - "failures": [ - { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" - } - } - ] + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 20, + "successful": 9, + "skipped": 2, + "failed": 0 }, - "cluster_2": { - "status": "failed", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 0, - "skipped": 0, - "failed": 5 - }, - "failures": [ - { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" + "_clusters": { + "total": 4, + "successful": 1, + "skipped": 1, + "running":0, + "partial": 1, + "failed": 1, + "details": { + "(local)": { + "status": "successful", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "skipped": 1, + "failed": 0 } + }, + "cluster_1": { + "status": "skipped", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 0, + "skipped": 0, + "failed": 5 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] + }, + "cluster_2": { + "status": "failed", 
+ "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 0, + "skipped": 0, + "failed": 5 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] + }, + "cluster_0": { + "status": "partial", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 4, + "skipped": 1, + "failed": 1 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] } - ] + } }, - "cluster_0": { - "status": "partial", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 4, - "skipped": 1, - "failed": 1 + "hits": { + "total": { + "value": 100, + "relation": "eq" }, - "failures": [ + "max_score": 1.5, + "hits": [ { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" - } + "_id": "id1", + "_score": 2.0 } ] } - } - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ - { - "_id": "id1", - "_score": 2.0 - } - ] - } - }"""); - assertEquals(expectedString, Strings.toString(response)); + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } } public void testSerialization() throws IOException { SearchResponse searchResponse = createTestItem(false); - SearchResponse deserialized = copyWriteable( - searchResponse, - namedWriteableRegistry, - SearchResponse::new, - TransportVersion.current() - ); - if (searchResponse.getHits().getTotalHits() == null) { - assertNull(deserialized.getHits().getTotalHits()); - } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, 
deserialized.getHits().getTotalHits().relation); + try { + SearchResponse deserialized = copyWriteable( + searchResponse, + namedWriteableRegistry, + SearchResponse::new, + TransportVersion.current() + ); + try { + if (searchResponse.getHits().getTotalHits() == null) { + assertNull(deserialized.getHits().getTotalHits()); + } else { + assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); + assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + } + assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); + assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); + assertEquals(searchResponse.getFailedShards(), deserialized.getFailedShards()); + assertEquals(searchResponse.getTotalShards(), deserialized.getTotalShards()); + assertEquals(searchResponse.getSkippedShards(), deserialized.getSkippedShards()); + assertEquals(searchResponse.getClusters(), deserialized.getClusters()); + } finally { + deserialized.decRef(); + } + } finally { + searchResponse.decRef(); } - assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); - assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); - assertEquals(searchResponse.getFailedShards(), deserialized.getFailedShards()); - assertEquals(searchResponse.getTotalShards(), deserialized.getTotalShards()); - assertEquals(searchResponse.getSkippedShards(), deserialized.getSkippedShards()); - assertEquals(searchResponse.getClusters(), deserialized.getClusters()); } public void testToXContentEmptyClusters() throws IOException { @@ -634,15 +664,23 @@ public void testToXContentEmptyClusters() throws IOException { ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY ); - SearchResponse deserialized = copyWriteable( - searchResponse, - namedWriteableRegistry, - SearchResponse::new, - 
TransportVersion.current() - ); - XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); - deserialized.getClusters().toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals(0, Strings.toString(builder).length()); + try { + SearchResponse deserialized = copyWriteable( + searchResponse, + namedWriteableRegistry, + SearchResponse::new, + TransportVersion.current() + ); + try { + XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); + deserialized.getClusters().toXContent(builder, ToXContent.EMPTY_PARAMS); + assertEquals(0, Strings.toString(builder).length()); + } finally { + deserialized.decRef(); + } + } finally { + searchResponse.decRef(); + } } public void testClustersHasRemoteCluster() { diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index 70bd2d9f00a05..1097174628e58 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -83,7 +83,12 @@ public void search(final SearchRequest request, final ActionListener 1L, SearchResponse.Clusters.EMPTY)); + var response = SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY); + try { + listener.onResponse(response); + } finally { + response.decRef(); + } } @Override @@ -161,18 +166,21 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - listener.onResponse( - new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) + var response = new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 0, + 0, + 0, + 0L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY ); + try { + 
listener.onResponse(response); + } finally { + response.decRef(); + } }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 366161881d30f..7090d590a4901 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -542,7 +542,7 @@ public void testCCSRemoteReduceMergeFails() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -584,9 +584,12 @@ public void testCCSRemoteReduce() throws Exception { SearchRequest searchRequest = new SearchRequest(); final CountDownLatch latch = new CountDownLatch(1); SetOnce>> setOnce = new SetOnce<>(); - AtomicReference response = new AtomicReference<>(); + final SetOnce response = new SetOnce<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.incRef(); + response.set(newValue); + }), latch ); TransportSearchAction.ccsRemoteReduce( @@ -608,18 +611,25 @@ public void testCCSRemoteReduce() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals(0, 
searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - assertEquals(totalClusters, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(totalClusters == 1 ? 1 : totalClusters + 1, searchResponse.getNumReducePhases()); + try { + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + assertEquals( + totalClusters, + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL) + ); + assertEquals(totalClusters == 1 ? 
1 : totalClusters + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } } { SearchRequest searchRequest = new SearchRequest(); @@ -650,7 +660,7 @@ public void testCCSRemoteReduce() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -710,7 +720,7 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -728,9 +738,12 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti SearchRequest searchRequest = new SearchRequest(); final CountDownLatch latch = new CountDownLatch(1); SetOnce>> setOnce = new SetOnce<>(); - AtomicReference response = new AtomicReference<>(); + SetOnce response = new SetOnce<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.mustIncRef(); + response.set(newValue); + }), latch ); Set clusterAliases = new HashSet<>(remoteClusterService.getRegisteredRemoteClusterNames()); @@ -756,22 +769,26 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + 
resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals( - disconnectedNodesIndices.size(), - searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) - ); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - int successful = totalClusters - disconnectedNodesIndices.size(); - assertEquals(successful, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(successful == 0 ? 0 : successful + 1, searchResponse.getNumReducePhases()); + try { + assertEquals( + disconnectedNodesIndices.size(), + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) + ); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + int successful = totalClusters - disconnectedNodesIndices.size(); + assertEquals(successful, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(successful == 0 ? 
0 : successful + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } } // give transport service enough time to realize that the node is down, and to notify the connection listeners @@ -794,7 +811,10 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti SetOnce>> setOnce = new SetOnce<>(); AtomicReference response = new AtomicReference<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.mustIncRef(); + response.set(newValue); + }), latch ); Set clusterAliases = new HashSet<>(remoteClusterService.getRegisteredRemoteClusterNames()); @@ -820,18 +840,25 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - assertEquals(totalClusters, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(totalClusters == 1 ? 
1 : totalClusters + 1, searchResponse.getNumReducePhases()); + try { + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + assertEquals( + totalClusters, + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL) + ); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(totalClusters == 1 ? 1 : totalClusters + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } }); assertEquals(0, service.getConnectionManager().size()); } finally { @@ -841,6 +868,15 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti } } + private static void resolveWithEmptySearchResponse(Tuple> tuple) { + var resp = emptySearchResponse(); + try { + tuple.v2().onResponse(resp); + } finally { + resp.decRef(); + } + } + public void testCollectSearchShards() throws Exception { int numClusters = randomIntBetween(2, 10); DiscoveryNode[] nodes = new DiscoveryNode[numClusters]; diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index a0eff567274dc..6a87c0f704600 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.IndicesRequestCache; @@ -18,7 +17,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -42,23 +41,23 @@ public void testQueryRewriteOnRefresh() throws Exception { // Search for a range and check that it missed the cache (since its the // first time it has run) - final SearchResponse r1 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r1); - assertThat(r1.getHits().getTotalHits().value, equalTo(3L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + ); assertRequestCacheStats(0, 1); // Search again and check it hits the cache - final SearchResponse r2 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r2); - assertThat(r2.getHits().getTotalHits().value, equalTo(3L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + ); assertRequestCacheStats(1, 1); // Index 
some more documents in the query range and refresh @@ -67,13 +66,13 @@ public void testQueryRewriteOnRefresh() throws Exception { refreshIndex(); // Search again and check the request cache for another miss since request cache should be invalidated by refresh - final SearchResponse r3 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r3); - assertThat(r3.getHits().getTotalHits().value, equalTo(5L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r3 -> assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java index 2c4c620c057b2..143aab4e58c78 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; @@ -42,7 +41,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -204,30 +205,34 @@ public void testCardinalityAggregation() throws IOException { assertNoFailures(bulkResponse); // Test the root flattened field. - SearchResponse response = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened")) - .get(); - - assertNoFailures(response); - Cardinality count = response.getAggregations().get("cardinality"); - assertCardinality(count, numDocs, precisionThreshold); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened")), + response -> { + Cardinality count = response.getAggregations().get("cardinality"); + assertCardinality(count, numDocs, precisionThreshold); + } + ); // Test two keyed flattened fields. 
- SearchResponse firstResponse = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.first")) - .get(); - assertNoFailures(firstResponse); - - Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); - assertCardinality(firstCount, numDocs, precisionThreshold); - - SearchResponse secondResponse = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.second")) - .get(); - assertNoFailures(secondResponse); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.first")), + firstResponse -> { + + Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); + assertCardinality(firstCount, numDocs, precisionThreshold); + } + ); - Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); - assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.second")), + secondResponse -> { + Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); + assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); + } + ); } private void assertCardinality(Cardinality count, long value, int precisionThreshold) { @@ -262,60 +267,56 @@ public void testTermsAggregation() throws IOException { // Aggregate on the root 'labels' field. 
TermsAggregationBuilder builder = createTermsAgg("labels"); - SearchResponse response = client().prepareSearch("test").addAggregation(builder).get(); - assertNoFailures(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(6)); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(builder), response -> { + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(6)); - Terms.Bucket bucket1 = terms.getBuckets().get(0); - assertEquals("urgent", bucket1.getKey()); - assertEquals(5, bucket1.getDocCount()); + Terms.Bucket bucket1 = terms.getBuckets().get(0); + assertEquals("urgent", bucket1.getKey()); + assertEquals(5, bucket1.getDocCount()); - Terms.Bucket bucket2 = terms.getBuckets().get(1); - assertThat(bucket2.getKeyAsString(), startsWith("v1.2.")); - assertEquals(1, bucket2.getDocCount()); + Terms.Bucket bucket2 = terms.getBuckets().get(1); + assertThat(bucket2.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket2.getDocCount()); + }); // Aggregate on the 'priority' subfield. 
TermsAggregationBuilder priorityAgg = createTermsAgg("labels.priority"); - SearchResponse priorityResponse = client().prepareSearch("test").addAggregation(priorityAgg).get(); - assertNoFailures(priorityResponse); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(priorityAgg), priorityResponse -> { + Terms priorityTerms = priorityResponse.getAggregations().get("terms"); + assertThat(priorityTerms, notNullValue()); + assertThat(priorityTerms.getName(), equalTo("terms")); + assertThat(priorityTerms.getBuckets().size(), equalTo(1)); - Terms priorityTerms = priorityResponse.getAggregations().get("terms"); - assertThat(priorityTerms, notNullValue()); - assertThat(priorityTerms.getName(), equalTo("terms")); - assertThat(priorityTerms.getBuckets().size(), equalTo(1)); - - Terms.Bucket priorityBucket = priorityTerms.getBuckets().get(0); - assertEquals("urgent", priorityBucket.getKey()); - assertEquals(5, priorityBucket.getDocCount()); + Terms.Bucket priorityBucket = priorityTerms.getBuckets().get(0); + assertEquals("urgent", priorityBucket.getKey()); + assertEquals(5, priorityBucket.getDocCount()); + }); // Aggregate on the 'release' subfield. 
TermsAggregationBuilder releaseAgg = createTermsAgg("labels.release"); - SearchResponse releaseResponse = client().prepareSearch("test").addAggregation(releaseAgg).get(); - assertNoFailures(releaseResponse); - - Terms releaseTerms = releaseResponse.getAggregations().get("terms"); - assertThat(releaseTerms, notNullValue()); - assertThat(releaseTerms.getName(), equalTo("terms")); - assertThat(releaseTerms.getBuckets().size(), equalTo(5)); - - for (Terms.Bucket bucket : releaseTerms.getBuckets()) { - assertThat(bucket.getKeyAsString(), startsWith("v1.2.")); - assertEquals(1, bucket.getDocCount()); - } + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(releaseAgg), releaseResponse -> { + Terms releaseTerms = releaseResponse.getAggregations().get("terms"); + assertThat(releaseTerms, notNullValue()); + assertThat(releaseTerms.getName(), equalTo("terms")); + assertThat(releaseTerms.getBuckets().size(), equalTo(5)); + + for (Terms.Bucket bucket : releaseTerms.getBuckets()) { + assertThat(bucket.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket.getDocCount()); + } + }); // Aggregate on the 'priority' subfield with a min_doc_count of 0. 
TermsAggregationBuilder minDocCountAgg = createTermsAgg("labels.priority").minDocCount(0); - SearchResponse minDocCountResponse = client().prepareSearch("test").addAggregation(minDocCountAgg).get(); - assertNoFailures(minDocCountResponse); - - Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); - assertThat(minDocCountTerms, notNullValue()); - assertThat(minDocCountTerms.getName(), equalTo("terms")); - assertThat(minDocCountTerms.getBuckets().size(), equalTo(1)); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(minDocCountAgg), minDocCountResponse -> { + Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); + assertThat(minDocCountTerms, notNullValue()); + assertThat(minDocCountTerms.getName(), equalTo("terms")); + assertThat(minDocCountTerms.getBuckets().size(), equalTo(1)); + }); } private TermsAggregationBuilder createTermsAgg(String field) { @@ -339,19 +340,22 @@ public void testLoadDocValuesFields() throws Exception { ) .get(); - SearchResponse response = client().prepareSearch("test").addDocValueField("flattened").addDocValueField("flattened.key").get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertNoFailuresAndResponse( + client().prepareSearch("test").addDocValueField("flattened").addDocValueField("flattened.key"), + response -> { + assertHitCount(response, 1); - Map fields = response.getHits().getAt(0).getFields(); + Map fields = response.getHits().getAt(0).getFields(); - DocumentField field = fields.get("flattened"); - assertEquals("flattened", field.getName()); - assertEquals(Arrays.asList("other_value", "value"), field.getValues()); + DocumentField field = fields.get("flattened"); + assertEquals("flattened", field.getName()); + assertEquals(Arrays.asList("other_value", "value"), field.getValues()); - DocumentField keyedField = fields.get("flattened.key"); - assertEquals("flattened.key", keyedField.getName()); - assertEquals("value", keyedField.getValue()); 
+ DocumentField keyedField = fields.get("flattened.key"); + assertEquals("flattened.key", keyedField.getName()); + assertEquals("value", keyedField.getValue()); + } + ); } public void testFieldSort() throws Exception { @@ -386,20 +390,22 @@ public void testFieldSort() throws Exception { .setSource(XContentFactory.jsonBuilder().startObject().startObject("flattened").field("other_key", "E").endObject().endObject()) .get(); - SearchResponse response = client().prepareSearch("test").addSort("flattened", SortOrder.DESC).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "3", "1", "2"); - - response = client().prepareSearch("test").addSort("flattened.key", SortOrder.DESC).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "2", "1", "3"); - - response = client().prepareSearch("test").addSort(new FieldSortBuilder("flattened.key").order(SortOrder.DESC).missing("Z")).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "3", "2", "1"); + assertNoFailuresAndResponse(client().prepareSearch("test").addSort("flattened", SortOrder.DESC), response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "1", "2"); + }); + assertNoFailuresAndResponse(client().prepareSearch("test").addSort("flattened.key", SortOrder.DESC), response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "2", "1", "3"); + }); + + assertNoFailuresAndResponse( + client().prepareSearch("test").addSort(new FieldSortBuilder("flattened.key").order(SortOrder.DESC).missing("Z")), + response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "2", "1"); + } + ); } public void testSourceFiltering() { @@ -410,23 +416,32 @@ public void testSourceFiltering() { prepareIndex("test").setId("1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).setSource(source).get(); - SearchResponse response = 
client().prepareSearch("test").setFetchSource(true).get(); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); + assertResponse( + client().prepareSearch("test").setFetchSource(true), + response -> assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)) + ); // Check 'include' filtering. - response = client().prepareSearch("test").setFetchSource("headers", null).get(); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); - - response = client().prepareSearch("test").setFetchSource("headers.content-type", null).get(); - Map filteredSource = Collections.singletonMap( - "headers", - Collections.singletonMap("content-type", "application/json") + assertResponse( + client().prepareSearch("test").setFetchSource("headers", null), + response -> assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)) ); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + + assertResponse(client().prepareSearch("test").setFetchSource("headers.content-type", null), response -> { + Map filteredSource = Collections.singletonMap( + "headers", + Collections.singletonMap("content-type", "application/json") + ); + assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + }); // Check 'exclude' filtering. 
- response = client().prepareSearch("test").setFetchSource(null, "headers.content-type").get(); - filteredSource = Collections.singletonMap("headers", Collections.singletonMap("origin", "https://www.elastic.co")); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + assertResponse( + client().prepareSearch("test").setFetchSource(null, "headers.content-type"), + response -> assertThat( + response.getHits().getAt(0).getSourceAsMap(), + equalTo(Collections.singletonMap("headers", Collections.singletonMap("origin", "https://www.elastic.co"))) + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 270ab3003a1f1..aa787e6343654 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -137,6 +137,7 @@ import static org.elasticsearch.search.SearchService.SEARCH_WORKER_THREADS_ENABLED; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; @@ -241,8 +242,10 @@ protected Settings nodeSettings() { public void testClearOnClose() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> 
assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -253,8 +256,10 @@ public void testClearOnClose() { public void testClearOnStop() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -265,8 +270,10 @@ public void testClearOnStop() { public void testClearIndexDelete() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -479,8 +486,10 @@ public void testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws Executi public void testBeforeShardLockDuringShardCreate() { IndexService indexService = createIndex("index", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build()); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), 
is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -787,9 +796,9 @@ public void testMaxOpenScrollContexts() throws Exception { LinkedList clearScrollIds = new LinkedList<>(); for (int i = 0; i < SearchService.MAX_OPEN_SCROLL_CONTEXT.get(Settings.EMPTY); i++) { - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - - if (randomInt(4) == 0) clearScrollIds.addLast(searchResponse.getScrollId()); + assertResponse(client().prepareSearch("index").setSize(1).setScroll("1m"), searchResponse -> { + if (randomInt(4) == 0) clearScrollIds.addLast(searchResponse.getScrollId()); + }); } ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); @@ -797,7 +806,7 @@ public void testMaxOpenScrollContexts() throws Exception { client().clearScroll(clearScrollRequest); for (int i = 0; i < clearScrollIds.size(); i++) { - client().prepareSearch("index").setSize(1).setScroll("1m").get(); + client().prepareSearch("index").setSize(1).setScroll("1m").get().decRef(); } final ShardScrollRequestTest request = new ShardScrollRequestTest(indexShard.shardId()); @@ -1433,7 +1442,7 @@ public void testDeleteIndexWhileSearch() throws Exception { latch.countDown(); while (stopped.get() == false) { try { - client().prepareSearch("test").setRequestCache(false).get(); + client().prepareSearch("test").setRequestCache(false).get().decRef(); } catch (Exception ignored) { return; } @@ -1635,20 +1644,27 @@ public void testCancelFetchPhaseEarly() throws Exception { service.setOnCreateSearchContext(c -> searchContextCreated.set(true)); // Test fetch phase is cancelled early - String scrollId = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))) - .get() - 
.getScrollId(); + String scrollId; + var searchResponse = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))).get(); + try { + scrollId = searchResponse.getScrollId(); + } finally { + searchResponse.decRef(); + } - client().searchScroll(new SearchScrollRequest(scrollId)).get(); + client().searchScroll(new SearchScrollRequest(scrollId)).get().decRef(); assertThat(searchContextCreated.get(), is(true)); ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.addScrollId(scrollId); client().clearScroll(clearScrollRequest); - scrollId = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))) - .get() - .getScrollId(); + searchResponse = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))).get(); + try { + scrollId = searchResponse.getScrollId(); + } finally { + searchResponse.decRef(); + } searchContextCreated.set(false); service.setOnCheckCancelled(t -> { SearchShardTask task = new SearchShardTask(randomLong(), "transport", "action", "", TaskId.EMPTY_TASK_ID, emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java index cfa0087731b60..779e0ad28433a 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -24,6 +23,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; public class GeoPointShapeQueryTests extends BasePointShapeQueryTestCase { @@ -78,8 +78,7 @@ public void testFieldAlias() throws IOException { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", point)).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", point)), 1); } private final DatelinePointShapeQueryTestCase dateline = new DatelinePointShapeQueryTestCase(); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 19f0d1e2e88a0..c5d5ecc1f90e8 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -766,16 +766,22 @@ public void testConcurrentSnapshotRestoreAndDeleteOther() { assertEquals(shards, restoreSnapshotResponse.getRestoreInfo().totalShards()); client().search( new SearchRequest("restored_" + index).source(new SearchSourceBuilder().size(0).trackTotalHits(true)), - searchResponseListener + searchResponseListener.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + }) ); }); deterministicTaskQueue.runAllRunnableTasks(); - assertEquals( - documentsFirstSnapshot + documentsSecondSnapshot, - Objects.requireNonNull(safeResult(searchResponseListener).getHits().getTotalHits()).value - ); + var response = safeResult(searchResponseListener); + try { + assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + } finally { + response.decRef(); + } + assertThat(safeResult(deleteSnapshotStepListener).isAcknowledged(), 
is(true)); assertThat(safeResult(restoreSnapshotResponseListener).getRestoreInfo().failedShards(), is(0)); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index a2499c06d6ccc..71030358e901f 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -333,7 +333,13 @@ protected void testSnapshotAndRestore(boolean recreateRepositoryBeforeRestore) t logger.info("--> add random documents to {}", index); addRandomDocuments(index, randomIntBetween(10, 1000)); } else { - int docCount = (int) prepareSearch(index).setSize(0).get().getHits().getTotalHits().value; + var resp = prepareSearch(index).setSize(0).get(); + final int docCount; + try { + docCount = (int) resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } int deleteCount = randomIntBetween(1, docCount); logger.info("--> delete {} random documents from {}", deleteCount, index); for (int i = 0; i < deleteCount; i++) { @@ -403,7 +409,12 @@ public void testMultipleSnapshotAndRollback() throws Exception { addRandomDocuments(indexName, docCount); } // Check number of documents in this iteration - docCounts[i] = (int) prepareSearch(indexName).setSize(0).get().getHits().getTotalHits().value; + var resp = prepareSearch(indexName).setSize(0).get(); + try { + docCounts[i] = (int) resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); assertSuccessfulSnapshot( clusterAdmin().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName) diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 3f394c1384432..cef8d555b111d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -147,7 +147,8 @@ public void testIndexPointsCircle() throws Exception { try { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)) - .get(); + .get() + .decRef(); } catch (Exception e) { assertThat( e.getCause().getMessage(), diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java index 58328671c58e8..cae57d5137acf 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java @@ -262,7 +262,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept // Set search.allow_expensive_queries to "null" updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); - assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(builder, 1); // Set search.allow_expensive_queries to "true" updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", true)); From 143f4208d1bb6942a5be7054d074be970390523e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Dec 2023 19:09:48 +0100 Subject: [PATCH 131/181] Fix remaining leaked SearchResponse issues in :server:integTests (#102896) This should be the last round for this module, found these using a prototype that has `SearchResponse` ref-counted already. 
--- .../join/query/ChildQuerySearchIT.java | 1 - .../action/search/LookupRuntimeFieldIT.java | 161 +++++----- .../action/search/PointInTimeIT.java | 239 +++++++------- .../action/search/SearchShardsIT.java | 33 +- .../action/search/TransportSearchIT.java | 99 +++--- .../master/IndexingMasterFailoverIT.java | 4 +- .../cluster/MinimumMasterNodesIT.java | 5 +- .../allocation/FilteringAllocationIT.java | 7 +- .../ClusterDisruptionCleanSettingsIT.java | 4 +- .../index/shard/SearchIdleIT.java | 9 +- .../indices/IndicesOptionsIntegrationIT.java | 6 +- .../memory/breaker/CircuitBreakerNoopIT.java | 4 +- .../breaker/CircuitBreakerServiceIT.java | 2 +- .../RandomExceptionCircuitBreakerIT.java | 2 +- .../indices/recovery/IndexRecoveryIT.java | 2 +- .../state/CloseWhileRelocatingShardsIT.java | 31 +- .../indices/stats/IndexStatsIT.java | 60 +--- .../elasticsearch/recovery/RelocationIT.java | 56 ++-- .../elasticsearch/routing/AliasRoutingIT.java | 199 +++--------- .../routing/SimpleRoutingIT.java | 104 ++----- .../search/SearchCancellationIT.java | 20 +- .../basic/TransportTwoNodesSearchIT.java | 89 +++--- .../search/ccs/CrossClusterSearchIT.java | 10 +- .../highlight/HighlighterSearchIT.java | 3 +- .../search/functionscore/QueryRescorerIT.java | 2 +- .../search/query/MultiMatchQueryIT.java | 9 +- .../search/routing/SearchPreferenceIT.java | 14 +- .../routing/SearchReplicaSelectionIT.java | 2 +- .../search/scroll/DuelScrollIT.java | 172 +++++----- .../search/scroll/SearchScrollIT.java | 294 ++++++++++-------- .../SearchScrollWithFailingNodesIT.java | 47 +-- .../search/searchafter/SearchAfterIT.java | 94 +++--- .../search/slice/SearchSliceIT.java | 101 +++--- .../search/source/MetadataFetchingIT.java | 4 +- .../search/stats/FieldUsageStatsIT.java | 6 +- .../suggest/CompletionSuggestSearchIT.java | 8 +- .../snapshots/ConcurrentSnapshotsIT.java | 3 +- .../AbstractSnapshotIntegTestCase.java | 9 +- .../elasticsearch/test/ESIntegTestCase.java | 21 +- 39 files changed, 950 
insertions(+), 986 deletions(-) diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index e433ce0b60596..ae1adf4160c2a 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -305,7 +305,6 @@ public void testHasParentFilter() throws Exception { constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false)) ).setSize(numChildDocsPerParent), response -> { - assertNoFailures(response); Set childIds = parentToChildrenEntry.getValue(); assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); for (int i = 0; i < response.getHits().getTotalHits().value; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java index f2e0511ffb7ab..7eaed125156e0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java @@ -24,6 +24,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; public class LookupRuntimeFieldIT extends ESIntegTestCase { @@ -132,90 +134,92 @@ public void populateIndex() throws Exception { } public void testBasic() { - SearchResponse searchResponse = prepareSearch("books").addFetchField("author") - .addFetchField("title") - 
.addSort("published_date", SortOrder.DESC) - .setSize(3) - .get(); - ElasticsearchAssertions.assertNoFailures(searchResponse); - ElasticsearchAssertions.assertHitCount(searchResponse, 5); + assertNoFailuresAndResponse( + prepareSearch("books").addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.DESC).setSize(3), + searchResponse -> { + ElasticsearchAssertions.assertHitCount(searchResponse, 5); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); - assertThat( - hit0.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) - ); + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + ); - SearchHit hit1 = searchResponse.getHits().getHits()[1]; - assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); - assertThat( - hit1.field("author").getValues(), - equalTo( - List.of( - Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), - Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) - ) - ) - ); + SearchHit hit1 = searchResponse.getHits().getHits()[1]; + assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); + assertThat( + hit1.field("author").getValues(), + equalTo( + List.of( + Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), + Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) + ) + ) + ); - SearchHit hit2 = searchResponse.getHits().getHits()[2]; - assertThat(hit2.field("title").getValues(), equalTo(List.of("the third book"))); - assertThat( - hit2.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), 
"last_name", List.of("Boston")))) + SearchHit hit2 = searchResponse.getHits().getHits()[2]; + assertThat(hit2.field("title").getValues(), equalTo(List.of("the third book"))); + assertThat( + hit2.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + ); + } ); } public void testLookupMultipleIndices() throws IOException { - SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" - { - "publisher": { - "type": "lookup", - "target_index": "publishers", - "input_field": "publisher_id", - "target_field": "_id", - "fetch_fields": ["name", "city"] + assertResponse( + prepareSearch("books").setRuntimeMappings(parseMapping(""" + { + "publisher": { + "type": "lookup", + "target_index": "publishers", + "input_field": "publisher_id", + "target_field": "_id", + "fetch_fields": ["name", "city"] + } } - } - """)) - .setFetchSource(false) - .addFetchField("title") - .addFetchField("author") - .addFetchField("publisher") - .addSort("published_date", SortOrder.DESC) - .setSize(2) - .get(); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); - assertThat( - hit0.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) - ); - assertThat( - hit0.field("publisher").getValues(), - equalTo(List.of(Map.of("name", List.of("The second publisher"), "city", List.of("Toronto")))) - ); + """)) + .setFetchSource(false) + .addFetchField("title") + .addFetchField("author") + .addFetchField("publisher") + .addSort("published_date", SortOrder.DESC) + .setSize(2), + searchResponse -> { + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", 
List.of("Boston")))) + ); + assertThat( + hit0.field("publisher").getValues(), + equalTo(List.of(Map.of("name", List.of("The second publisher"), "city", List.of("Toronto")))) + ); - SearchHit hit1 = searchResponse.getHits().getHits()[1]; - assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); - assertThat( - hit1.field("author").getValues(), - equalTo( - List.of( - Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), - Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) - ) - ) - ); - assertThat( - hit1.field("publisher").getValues(), - equalTo(List.of(Map.of("name", List.of("The first publisher"), "city", List.of("Montreal", "Vancouver")))) + SearchHit hit1 = searchResponse.getHits().getHits()[1]; + assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); + assertThat( + hit1.field("author").getValues(), + equalTo( + List.of( + Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), + Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) + ) + ) + ); + assertThat( + hit1.field("publisher").getValues(), + equalTo(List.of(Map.of("name", List.of("The first publisher"), "city", List.of("Montreal", "Vancouver")))) + ); + } ); } public void testFetchField() throws Exception { - SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" + assertNoFailuresAndResponse(prepareSearch("books").setRuntimeMappings(parseMapping(""" { "author": { "type": "lookup", @@ -225,12 +229,15 @@ public void testFetchField() throws Exception { "fetch_fields": ["first_name", {"field": "joined", "format": "MM/yyyy"}] } } - """)).addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.ASC).setSize(1).get(); - ElasticsearchAssertions.assertNoFailures(searchResponse); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - // "author", "john", "first_name", "John", "last_name", "New York", "joined", "2020-03-01" - 
assertThat(hit0.field("title").getValues(), equalTo(List.of("the first book"))); - assertThat(hit0.field("author").getValues(), equalTo(List.of(Map.of("first_name", List.of("John"), "joined", List.of("03/2020"))))); + """)).addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.ASC).setSize(1), searchResponse -> { + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + // "author", "john", "first_name", "John", "last_name", "New York", "joined", "2020-03-01" + assertThat(hit0.field("title").getValues(), equalTo(List.of("the first book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("John"), "joined", List.of("03/2020")))) + ); + }); } private Map parseMapping(String mapping) throws IOException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index d3e312e173c29..21bbd32e6bf26 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -49,7 +49,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.empty; @@ -83,9 +83,10 @@ public void testBasic() { } refresh("test"); String pitId = openPointInTime(new String[] { "test" }, 
TimeValue.timeValueMinutes(2)); - SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertThat(resp1.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp1, numDocs); + assertResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp1 -> { + assertThat(resp1.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp1, numDocs); + }); int deletedDocs = 0; for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { @@ -96,18 +97,20 @@ public void testBasic() { } refresh("test"); if (randomBoolean()) { - SearchResponse resp2 = prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get(); - assertNoFailures(resp2); - assertHitCount(resp2, numDocs - deletedDocs); + final int delDocCount = deletedDocs; + assertNoFailuresAndResponse( + prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()), + resp2 -> assertHitCount(resp2, numDocs - delDocCount) + ); } try { - SearchResponse resp3 = prepareSearch().setPreference(null) - .setQuery(new MatchAllQueryBuilder()) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertNoFailures(resp3); - assertHitCount(resp3, numDocs); - assertThat(resp3.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse( + prepareSearch().setPreference(null).setQuery(new MatchAllQueryBuilder()).setPointInTime(new PointInTimeBuilder(pitId)), + resp3 -> { + assertHitCount(resp3, numDocs); + assertThat(resp3.pointInTimeId(), equalTo(pitId)); + } + ); } finally { closePointInTime(pitId); } @@ -127,27 +130,24 @@ public void testMultipleIndices() { refresh(); String pitId = openPointInTime(new String[] { "*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertNotNull(resp.pointInTimeId()); - assertThat(resp.pointInTimeId(), 
equalTo(pitId)); int moreDocs = randomIntBetween(10, 50); - for (int i = 0; i < moreDocs; i++) { - String id = "more-" + i; - String index = "index-" + randomIntBetween(1, numIndices); - prepareIndex(index).setId(id).setSource("value", i).get(); - } - refresh(); - resp = prepareSearch().get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs + moreDocs); - - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertNotNull(resp.pointInTimeId()); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertNotNull(resp.pointInTimeId()); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + for (int i = 0; i < moreDocs; i++) { + String id = "more-" + i; + String index = "index-" + randomIntBetween(1, numIndices); + prepareIndex(index).setId(id).setSource("value", i).get(); + } + refresh(); + }); + assertNoFailuresAndResponse(prepareSearch(), resp -> assertHitCount(resp, numDocs + moreDocs)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertNotNull(resp.pointInTimeId()); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); } finally { closePointInTime(pitId); } @@ -187,8 +187,7 @@ public void testIndexFilter() { String[] actualIndices = searchContextId.getActualIndices(); assertEquals(1, actualIndices.length); assertEquals("index-3", actualIndices[0]); - assertResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(50), resp -> { - assertNoFailures(resp); + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(50), resp -> { assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); 
assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -213,10 +212,10 @@ public void testRelocation() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); final Set dataNodes = clusterService().state() .nodes() .getDataNodes() @@ -233,10 +232,10 @@ public void testRelocation() throws Exception { } refresh(); } - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); assertBusy(() -> { final Set assignedNodes = clusterService().state() .routingTable() @@ -246,10 +245,10 @@ public void testRelocation() throws Exception { .collect(Collectors.toSet()); assertThat(assignedNodes, everyItem(not(in(excludedNodes)))); }, 30, TimeUnit.SECONDS); - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); } finally { closePointInTime(pitId); } @@ -264,17 +263,21 
@@ public void testPointInTimeNotFound() throws Exception { } refresh(); String pit = openPointInTime(new String[] { "index" }, TimeValue.timeValueSeconds(5)); - SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertNoFailures(resp1); - assertHitCount(resp1, index1); - if (rarely()) { - assertBusy(() -> { - final CommonStats stats = indicesAdmin().prepareStats().setSearch(true).get().getTotal(); - assertThat(stats.search.getOpenContexts(), equalTo(0L)); - }, 60, TimeUnit.SECONDS); - } else { - closePointInTime(resp1.pointInTimeId()); - } + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), resp1 -> { + assertHitCount(resp1, index1); + if (rarely()) { + try { + assertBusy(() -> { + final CommonStats stats = indicesAdmin().prepareStats().setSearch(true).get().getTotal(); + assertThat(stats.search.getOpenContexts(), equalTo(0L)); + }, 60, TimeUnit.SECONDS); + } catch (Exception e) { + throw new AssertionError(e); + } + } else { + closePointInTime(resp1.pointInTimeId()); + } + }); SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, () -> prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get() @@ -302,20 +305,23 @@ public void testIndexNotFound() { refresh(); String pit = openPointInTime(new String[] { "index-*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertNoFailures(resp); - assertHitCount(resp, index1 + index2); + assertNoFailuresAndResponse( + prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), + resp -> assertHitCount(resp, index1 + index2) + ); indicesAdmin().prepareDelete("index-1").get(); if (randomBoolean()) { - resp = prepareSearch("index-*").get(); - assertNoFailures(resp); - assertHitCount(resp, index2); + 
assertNoFailuresAndResponse(prepareSearch("index-*"), resp -> assertHitCount(resp, index2)); } // Allow partial search result - resp = prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertFailures(resp); - assertHitCount(resp, index2); + assertResponse( + prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)), + resp -> { + assertFailures(resp); + assertHitCount(resp, index2); + } + ); // Do not allow partial search result expectThrows( @@ -356,14 +362,15 @@ public void testCanMatch() throws Exception { } } prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); - SearchResponse resp = prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) - .setPreFilterShardSize(randomIntBetween(2, 3)) - .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertThat(resp.getHits().getHits(), arrayWithSize(0)); + assertResponse( + prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setPreference(null) + .setPreFilterShardSize(randomIntBetween(2, 3)) + .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) + .setPointInTime(new PointInTimeBuilder(pitId)), + resp -> assertThat(resp.getHits().getHits(), arrayWithSize(0)) + ); for (String node : internalCluster().nodesInclude("test")) { for (IndexService indexService : internalCluster().getInstance(IndicesService.class, node)) { for (IndexShard indexShard : indexService) { @@ -415,19 +422,20 @@ public void testPartialResults() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test-*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = 
prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs1 + numDocs2); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs1 + numDocs2); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); internalCluster().restartNode(assignedNodeForIndex1); - resp = prepareSearch().setPreference(null) - .setAllowPartialSearchResults(true) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertFailures(resp); - assertThat(resp.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp, numDocs2); + assertResponse( + prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), + resp -> { + assertFailures(resp); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp, numDocs2); + } + ); } finally { closePointInTime(pitId); } @@ -547,40 +555,45 @@ private void assertPagination(PointInTimeBuilder pit, int expectedNumDocs, int s reverseMuls[i] = expectedSorts.get(i).order() == SortOrder.ASC ? 
1 : -1; } SearchResponse response = client().search(searchRequest).get(); - Object[] lastSortValues = null; - while (response.getHits().getHits().length > 0) { - Object[] lastHitSortValues = null; - for (SearchHit hit : response.getHits().getHits()) { - assertTrue(seen.add(hit.getIndex() + hit.getId())); - - if (lastHitSortValues != null) { + try { + Object[] lastSortValues = null; + while (response.getHits().getHits().length > 0) { + Object[] lastHitSortValues = null; + for (SearchHit hit : response.getHits().getHits()) { + assertTrue(seen.add(hit.getIndex() + hit.getId())); + + if (lastHitSortValues != null) { + for (int i = 0; i < expectedSorts.size(); i++) { + Comparable value = (Comparable) hit.getRawSortValues()[i]; + int cmp = value.compareTo(lastHitSortValues[i]) * reverseMuls[i]; + if (cmp != 0) { + assertThat(cmp, equalTo(1)); + break; + } + } + } + lastHitSortValues = hit.getRawSortValues(); + } + int len = response.getHits().getHits().length; + SearchHit last = response.getHits().getHits()[len - 1]; + if (lastSortValues != null) { for (int i = 0; i < expectedSorts.size(); i++) { - Comparable value = (Comparable) hit.getRawSortValues()[i]; - int cmp = value.compareTo(lastHitSortValues[i]) * reverseMuls[i]; + Comparable value = (Comparable) last.getSortValues()[i]; + int cmp = value.compareTo(lastSortValues[i]) * reverseMuls[i]; if (cmp != 0) { assertThat(cmp, equalTo(1)); break; } } } - lastHitSortValues = hit.getRawSortValues(); - } - int len = response.getHits().getHits().length; - SearchHit last = response.getHits().getHits()[len - 1]; - if (lastSortValues != null) { - for (int i = 0; i < expectedSorts.size(); i++) { - Comparable value = (Comparable) last.getSortValues()[i]; - int cmp = value.compareTo(lastSortValues[i]) * reverseMuls[i]; - if (cmp != 0) { - assertThat(cmp, equalTo(1)); - break; - } - } + assertThat(last.getSortValues().length, equalTo(expectedSorts.size())); + lastSortValues = last.getSortValues(); + 
searchRequest.source().searchAfter(last.getSortValues()); + response.decRef(); + response = client().search(searchRequest).get(); } - assertThat(last.getSortValues().length, equalTo(expectedSorts.size())); - lastSortValues = last.getSortValues(); - searchRequest.source().searchAfter(last.getSortValues()); - response = client().search(searchRequest).get(); + } finally { + response.decRef(); } assertThat(seen.size(), equalTo(expectedNumDocs)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java index 8b1acf11a7a5d..7da015052fe82 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java @@ -24,7 +24,9 @@ import java.util.Collection; import java.util.Queue; +import java.util.concurrent.ExecutionException; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; @@ -105,7 +107,7 @@ public void testBasic() { } } - public void testRandom() { + public void testRandom() throws ExecutionException, InterruptedException { int numIndices = randomIntBetween(1, 10); for (int i = 0; i < numIndices; i++) { String index = "index-" + i; @@ -127,21 +129,22 @@ public void testRandom() { RangeQueryBuilder rangeQuery = new RangeQueryBuilder("value").from(from).to(to).includeUpper(true).includeLower(true); SearchRequest searchRequest = new SearchRequest().indices("index-*").source(new SearchSourceBuilder().query(rangeQuery)); searchRequest.setPreFilterShardSize(1); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - var searchShardsRequest = new SearchShardsRequest( - new String[] { "index-*" }, - 
SearchRequest.DEFAULT_INDICES_OPTIONS, - rangeQuery, - null, - preference, - randomBoolean(), - randomBoolean() ? null : randomAlphaOfLength(10) - ); - var searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); + assertResponse(client().search(searchRequest), searchResponse -> { + var searchShardsRequest = new SearchShardsRequest( + new String[] { "index-*" }, + SearchRequest.DEFAULT_INDICES_OPTIONS, + rangeQuery, + null, + preference, + randomBoolean(), + randomBoolean() ? null : randomAlphaOfLength(10) + ); + var searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); - assertThat(searchShardsResponse.getGroups(), hasSize(searchResponse.getTotalShards())); - long skippedShards = searchShardsResponse.getGroups().stream().filter(SearchShardsGroup::skipped).count(); - assertThat(skippedShards, equalTo((long) searchResponse.getSkippedShards())); + assertThat(searchShardsResponse.getGroups(), hasSize(searchResponse.getTotalShards())); + long skippedShards = searchShardsResponse.getGroups().stream().filter(SearchShardsGroup::skipped).count(); + assertThat(skippedShards, equalTo((long) searchResponse.getSkippedShards())); + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index 31ffe560be010..5bb21dc874747 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -74,6 +74,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -121,7 +122,7 @@ protected Collection> nodePlugins() { return Collections.singletonList(TestPlugin.class); } - public void testLocalClusterAlias() { + public void testLocalClusterAlias() throws ExecutionException, InterruptedException { long nowInMillis = randomLongBetween(0, Long.MAX_VALUE); IndexRequest indexRequest = new IndexRequest("test"); indexRequest.id("1"); @@ -140,14 +141,15 @@ public void testLocalClusterAlias() { nowInMillis, randomBoolean() ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - SearchHit[] hits = searchResponse.getHits().getHits(); - assertEquals(1, hits.length); - SearchHit hit = hits[0]; - assertEquals("local", hit.getClusterAlias()); - assertEquals("test", hit.getIndex()); - assertEquals("1", hit.getId()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("local", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -158,14 +160,15 @@ public void testLocalClusterAlias() { nowInMillis, randomBoolean() ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - SearchHit[] hits = searchResponse.getHits().getHits(); - assertEquals(1, hits.length); - SearchHit hit = hits[0]; - assertEquals("", hit.getClusterAlias()); - assertEquals("test", hit.getIndex()); - assertEquals("1", hit.getId()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = 
searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + }); } } @@ -193,8 +196,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce { SearchRequest searchRequest = new SearchRequest(""); searchRequest.indicesOptions(IndicesOptions.fromOptions(true, true, true, true)); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(0, searchResponse.getTotalShards()); + assertResponse(client().search(searchRequest), searchResponse -> assertEquals(0, searchResponse.getTotalShards())); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -217,9 +219,10 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce randomBoolean() ); searchRequest.indices(""); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -236,13 +239,14 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce rangeQuery.lt("1982-01-01"); sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, 
searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + }); } } - public void testFinalReduce() { + public void testFinalReduce() throws ExecutionException, InterruptedException { long nowInMillis = randomLongBetween(0, Long.MAX_VALUE); TaskId taskId = new TaskId("node", randomNonNegativeLong()); { @@ -274,11 +278,12 @@ public void testFinalReduce() { SearchRequest searchRequest = randomBoolean() ? originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); - LongTerms longTerms = aggregations.get("terms"); - assertEquals(1, longTerms.getBuckets().size()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(2, searchResponse.getHits().getTotalHits().value); + Aggregations aggregations = searchResponse.getAggregations(); + LongTerms longTerms = aggregations.get("terms"); + assertEquals(1, longTerms.getBuckets().size()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -289,11 +294,12 @@ public void testFinalReduce() { nowInMillis, false ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); - LongTerms longTerms = aggregations.get("terms"); - assertEquals(2, longTerms.getBuckets().size()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(2, searchResponse.getHits().getTotalHits().value); + Aggregations aggregations = searchResponse.getAggregations(); + LongTerms longTerms = aggregations.get("terms"); + assertEquals(2, longTerms.getBuckets().size()); + }); } } @@ -309,7 +315,7 
@@ public void testWaitForRefreshIndexValidation() throws Exception { Arrays.fill(validCheckpoints, SequenceNumbers.UNASSIGNED_SEQ_NO); // no exception - prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get(); + prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get().decRef(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -373,7 +379,7 @@ public void testShardCountLimit() throws Exception { assertAcked(prepareCreate("test2").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numPrimaries2))); // no exception - prepareSearch("test1").get(); + prepareSearch("test1").get().decRef(); updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1 - 1)); @@ -386,7 +392,7 @@ public void testShardCountLimit() throws Exception { updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1)); // no exception - prepareSearch("test1").get(); + prepareSearch("test1").get().decRef(); e = expectThrows(IllegalArgumentException.class, () -> prepareSearch("test1", "test2").get()); assertThat( @@ -422,12 +428,13 @@ public void testSearchIdle() throws Exception { prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); - assertBusy(() -> { - SearchResponse resp = prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) - .setPreFilterShardSize(randomIntBetween(1, 3)) - .get(); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); - }); + assertBusy( + () -> assertResponse( + prepareSearch("test").setQuery(new 
RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + .setPreFilterShardSize(randomIntBetween(1, 3)), + resp -> assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + ) + ); } public void testCircuitBreakerReduceFail() throws Exception { @@ -471,7 +478,7 @@ public void onFailure(Exception e) { assertBusy(() -> { Exception exc = expectThrows( Exception.class, - () -> client.prepareSearch("test").addAggregation(new TestAggregationBuilder("test")).get() + () -> client.prepareSearch("test").addAggregation(new TestAggregationBuilder("test")).get().decRef() ); assertThat(exc.getCause().getMessage(), containsString("")); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 837c55e81b471..1887e37cbbf47 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -20,7 +20,7 @@ import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class IndexingMasterFailoverIT extends ESIntegTestCase { @@ -97,7 +97,7 @@ public void run() { ensureGreen("myindex"); refresh(); - assertThat(prepareSearch("myindex").get().getHits().getTotalHits().value, equalTo(10L)); + assertHitCount(prepareSearch("myindex"), 10); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 
746ddc56870ae..09c14df3566af 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -105,10 +105,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { logger.info("--> verify we get the data back"); for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(100L) - ); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } String masterNode = internalCluster().getMasterName(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java index 04fba1f46074f..33719df372fb1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java @@ -31,6 +31,7 @@ import java.util.Set; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) @@ -51,7 +52,7 @@ public void testDecommissionNodeNoReplicas() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); final boolean closed = randomBoolean(); if (closed) { @@ -79,7 +80,7 @@ public void testDecommissionNodeNoReplicas() { } 
indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } public void testAutoExpandReplicasToFilteredNodes() { @@ -132,7 +133,7 @@ public void testDisablingAllocationFiltering() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); final boolean closed = randomBoolean(); if (closed) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java index 5ea78a6b1e3a0..e8234fb09512b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java @@ -23,7 +23,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class ClusterDisruptionCleanSettingsIT extends ESIntegTestCase { @@ -63,6 +63,6 @@ public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Excep IndicesStoreIntegrationIT.relocateAndBlockCompletion(logger, "test", 0, node_1, node_2); // now search for the documents and see if we get a reply - 
assertThat(prepareSearch().setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0), 100); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java index e1ab2bdc2369e..1a8f928d9c10f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java @@ -48,7 +48,14 @@ public class SearchIdleIT extends ESSingleNodeTestCase { public void testAutomaticRefreshSearch() throws InterruptedException { - runTestAutomaticRefresh(numDocs -> client().prepareSearch("test").get().getHits().getTotalHits().value); + runTestAutomaticRefresh(numDocs -> { + var resp = client().prepareSearch("test").get(); + try { + return resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } + }); } public void testAutomaticRefreshGet() throws InterruptedException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index ce3fd98476725..658b9eadd772f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -408,7 +408,7 @@ public void testAllMissingStrict() throws Exception { expectThrows(IndexNotFoundException.class, () -> prepareSearch("test2", "test3").setQuery(matchAllQuery()).get()); // you should still be able to run empty searches without things blowing up - prepareSearch().setQuery(matchAllQuery()).get(); + prepareSearch().setQuery(matchAllQuery()).get().decRef(); } // For now don't handle closed indices @@ -681,7 +681,7 @@ private static void verify(ActionRequestBuilder 
requestBuilder, boolean fa }); } else { try { - requestBuilder.get(); + requestBuilder.get().decRef(); fail("IndexNotFoundException or IndexClosedException was expected"); } catch (IndexNotFoundException | IndexClosedException e) {} } @@ -694,7 +694,7 @@ private static void verify(ActionRequestBuilder requestBuilder, boolean fa assertThat(response.getResponses()[0].getResponse(), notNullValue()); }); } else { - requestBuilder.get(); + requestBuilder.get().decRef(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index f9f17d8e1ebbf..dd29823f8076f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -51,7 +51,7 @@ public void testNoopRequestBreaker() throws Exception { indexRandom(true, reqs); // A cardinality aggregation uses BigArrays and thus the REQUEST breaker - client.prepareSearch("cb-test").setQuery(matchAllQuery()).addAggregation(cardinality("card").field("test")).get(); + client.prepareSearch("cb-test").setQuery(matchAllQuery()).addAggregation(cardinality("card").field("test")).get().decRef(); // no exception because the breaker is a noop } @@ -68,7 +68,7 @@ public void testNoopFielddataBreaker() throws Exception { indexRandom(true, reqs); // Sorting using fielddata and thus the FIELDDATA breaker - client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); + client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get().decRef(); // no exception because the breaker is a noop } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index e726c8a08002a..705fb879e9125 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -193,7 +193,7 @@ public void testRamAccountingTermsEnum() throws Exception { indexRandom(true, false, true, reqs); // execute a search that loads field data (sorting on the "test" field) - client.prepareSearch("ramtest").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); + client.prepareSearch("ramtest").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get().decRef(); // clear field data cache (thus setting the loaded field data back to 0) clearFieldData(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 6a52159c71ab9..2935efb4808a7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -159,7 +159,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc boolean success = false; try { // Sort by the string and numeric fields, to load them into field data - searchRequestBuilder.get(); + searchRequestBuilder.get().decRef(); success = true; } catch (SearchPhaseExecutionException ex) { logger.info("expected SearchPhaseException: [{}]", ex.getMessage()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 
762bbdda77df1..2cbc3477cb49d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -933,7 +933,7 @@ private IndicesStatsResponse createAndPopulateIndex(String name, int nodeCount, indexRandom(true, docs); flush(); - assertThat(prepareSearch(name).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs)); + assertHitCount(prepareSearch(name).setSize(0), numDocs); return indicesAdmin().prepareStats(name).get(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index 77d38410d1ea9..b66a0b0f3be44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -48,6 +48,7 @@ import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsClosed; import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsOpened; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -241,20 +242,22 @@ public void testCloseWhileRelocatingShards() throws Exception { ensureGreen(indices); for (String index : acknowledgedCloses) { - long docsCount = prepareSearch(index).setSize(0).setTrackTotalHits(true).get().getHits().getTotalHits().value; - assertEquals( - "Expected " - + docsPerIndex.get(index) - + " docs in index " - + index - + " but got " - + docsCount - + " (close acknowledged=" - + acknowledgedCloses.contains(index) - + ")", - (long) docsPerIndex.get(index), - 
docsCount - ); + assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { + long docsCount = response.getHits().getTotalHits().value; + assertEquals( + "Expected " + + docsPerIndex.get(index) + + " docs in index " + + index + + " but got " + + docsCount + + " (close acknowledged=" + + acknowledgedCloses.contains(index) + + ")", + (long) docsPerIndex.get(index), + docsCount + ); + }); } } finally { updateClusterSettings(Settings.builder().putNull(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java index ec62a1cbbd9bf..a98297e8b49ae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -82,6 +82,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.emptyCollectionOf; @@ -150,8 +151,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data... 
- prepareSearch().addSort("field", SortOrder.ASC).get(); - prepareSearch().addSort("field", SortOrder.ASC).get(); + prepareSearch().addSort("field", SortOrder.ASC).get().decRef(); + prepareSearch().addSort("field", SortOrder.ASC).get().decRef(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).get(); assertThat( @@ -166,8 +167,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); // sort to load it to field data... - prepareSearch().addSort("field2", SortOrder.ASC).get(); - prepareSearch().addSort("field2", SortOrder.ASC).get(); + prepareSearch().addSort("field2", SortOrder.ASC).get().decRef(); + prepareSearch().addSort("field2", SortOrder.ASC).get().decRef(); // now check the per field stats nodesStats = clusterAdmin().prepareNodesStats("data:true") @@ -264,8 +265,8 @@ public void testClearAllCaches() throws Exception { assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data and filter to load filter cache - prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value1")).addSort("field", SortOrder.ASC).get(); - prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value2")).addSort("field", SortOrder.ASC).get(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value1")).addSort("field", SortOrder.ASC).get().decRef(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value2")).addSort("field", SortOrder.ASC).get().decRef(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).get(); assertThat( @@ -355,10 +356,7 @@ public void testQueryCache() throws Exception { assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), equalTo(0L)); assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), equalTo(0L)); 
for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -389,10 +387,7 @@ public void testQueryCache() throws Exception { }); for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -407,29 +402,13 @@ public void testQueryCache() throws Exception { // test explicit request parameter - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(false) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(false), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L) ); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(true) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -440,24 
+419,13 @@ public void testQueryCache() throws Exception { indicesAdmin().prepareClearCache().setRequestCache(true).get(); // clean the cache updateIndexSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false), "idx"); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L) ); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(true) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -983,7 +951,7 @@ public void testGroupsParam() throws Exception { prepareIndex("test1").setId(Integer.toString(1)).setSource("foo", "bar").get(); refresh(); - prepareSearch("_all").setStats("bar", "baz").get(); + prepareSearch("_all").setStats("bar", "baz").get().decRef(); IndicesStatsRequestBuilder builder = indicesAdmin().prepareStats(); IndicesStatsResponse stats = builder.get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index e53bcb0480d7b..0e14d80aaa0cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -43,7 +43,6 @@ import 
org.elasticsearch.indices.recovery.RecoveryFileChunkRequest; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -80,6 +79,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; @@ -133,7 +133,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + assertHitCount(prepareSearch("test").setSize(0), 20L); logger.info("--> start another node"); final String node_2 = internalCluster().startNode(); @@ -155,7 +155,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count again..."); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + assertHitCount(prepareSearch("test").setSize(0), 20); } public void testRelocationWhileIndexingRandom() throws Exception { @@ -229,35 +229,31 @@ public void testRelocationWhileIndexingRandom() throws Exception { logger.info("--> refreshing the index"); indicesAdmin().prepareRefresh("test").get(); logger.info("--> searching the index"); - boolean ranOnce = false; for (int i = 0; i < 10; i++) { + final int idx = i; logger.info("--> 
START search test round {}", i + 1); - SearchHits hits = prepareSearch("test").setQuery(matchAllQuery()) - .setSize((int) indexer.totalIndexedDocs()) - .storedFields() - .get() - .getHits(); - ranOnce = true; - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { - int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; - for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { - hitIds[hit] = hit + 1; - } - Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); - for (SearchHit hit : hits.getHits()) { - int id = Integer.parseInt(hit.getId()); - if (set.remove(id) == false) { - logger.error("Extra id [{}]", id); + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), + response -> { + var hits = response.getHits(); + if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; + for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { + hitIds[hit] = hit + 1; + } + Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); + for (SearchHit hit : hits.getHits()) { + int id = Integer.parseInt(hit.getId()); + if (set.remove(id) == false) { + logger.error("Extra id [{}]", id); + } + } + set.forEach(value -> logger.error("Missing id [{}]", value)); } + assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + logger.info("--> DONE search test round {}", idx + 1); } - set.forEach(value -> logger.error("Missing id [{}]", value)); - } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); - logger.info("--> DONE search test round {}", i + 1); - - } - if (ranOnce == false) { - fail(); + ); } } } @@ -570,7 +566,7 @@ public void testRelocateWhileWaitingForRefresh() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + 
assertHitCount(prepareSearch("test").setSize(0), 20); } public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception { @@ -636,7 +632,7 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E assertTrue(pendingIndexResponses.stream().allMatch(ActionFuture::isDone)); }, 1, TimeUnit.MINUTES); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(120L)); + assertHitCount(prepareSearch("test").setSize(0), 120); } public void testRelocationEstablishedPeerRecoveryRetentionLeases() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 442a2dc99bda3..8fb56d17b93ff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.xcontent.XContentFactory; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; @@ -116,45 +117,24 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - - assertThat( - 
prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - - assertThat(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(0L)); - - assertThat( - prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> indexing with id [2], and routing [1] using alias"); @@ -162,111 +142,50 @@ public void testAliasSearchRouting() throws 
Exception { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with 0 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with 1 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - 
assertThat(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with 0,1 indexRoutings , should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting("0", "1") - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); - assertThat(prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with two routing aliases , should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) 
- ); - assertThat( - prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with alias0, alias1 and alias01, should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("alias0", "alias1", "alias01").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias0", "alias1", "alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with test, alias0 and alias1, should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("test", "alias0", "alias1").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("test", "alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -316,43 +235,20 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { logger.info("--> search with alias-a1,alias-b0, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias-a1", 
"alias-b0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - prepareSearch("alias-a1", "alias-b0").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias-a1", "alias-b0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with alias-ab, should find two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with alias-a0,alias-b1 should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("alias-a0", "alias-b1").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias-a0", "alias-b1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -374,7 +270,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro logger.info("--> search all on index_* should find two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, 
equalTo(2L)); + assertHitCount(prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -420,11 +316,8 @@ public void testIndexingAliasesOverTime() throws Exception { logger.info("--> verifying get and search with routing, should find"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "0").setRouting("3").get().isExists(), equalTo(true)); - assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> creating alias with routing [4]"); @@ -432,11 +325,8 @@ public void testIndexingAliasesOverTime() throws Exception { logger.info("--> verifying search with wrong routing should not find"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> creating alias with search routing [3,4] and index routing 4"); @@ -453,11 +343,8 @@ public void testIndexingAliasesOverTime() throws Exception { for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "0").setRouting("3").get().isExists(), equalTo(true)); assertThat(client().prepareGet("test", "1").setRouting("4").get().isExists(), equalTo(true)); - 
assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java index 772d8767b7dd0..f59ec4d42089e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -35,6 +35,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentFactory; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -134,36 +135,19 @@ public void testSimpleSearchRouting() { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 1L); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + 
assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } String secondRoutingValue = "1"; @@ -176,86 +160,42 @@ public void testSimpleSearchRouting() { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with {} routing, should find one", routingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); 
+ assertHitCount(prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with {} routing, should find one", secondRoutingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting(secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with {},{} indexRoutings , should find two", routingValue, "1"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue, secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue, secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + assertHitCount(prepareSearch().setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount( + prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), + 2 ); } logger.info("--> search with {},{},{} indexRoutings , should find two", routingValue, secondRoutingValue, routingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue, secondRoutingValue, routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + assertHitCount( + prepareSearch().setRouting(routingValue, secondRoutingValue, 
routingValue).setQuery(QueryBuilders.matchAllQuery()), + 2 ); - assertThat( + assertHitCount( prepareSearch().setSize(0) .setRouting(routingValue, secondRoutingValue, routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + .setQuery(QueryBuilders.matchAllQuery()), + 2 ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 19dfe598b5318..aaf218e3579be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -169,21 +169,25 @@ public void testCancellationOfScrollSearchesOnFollowupRequests() throws Exceptio logger.info("Executing search"); TimeValue keepAlive = TimeValue.timeValueSeconds(5); + String scrollId; SearchResponse searchResponse = prepareSearch("test").setScroll(keepAlive) .setSize(2) .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))) .get(); + try { + assertNotNull(searchResponse.getScrollId()); - assertNotNull(searchResponse.getScrollId()); + // Enable block so the second request would block + for (ScriptedBlockPlugin plugin : plugins) { + plugin.reset(); + plugin.enableBlock(); + } - // Enable block so the second request would block - for (ScriptedBlockPlugin plugin : plugins) { - plugin.reset(); - plugin.enableBlock(); + scrollId = searchResponse.getScrollId(); + logger.info("Executing scroll with id {}", scrollId); + } finally { + searchResponse.decRef(); } - - String scrollId = searchResponse.getScrollId(); - logger.info("Executing scroll with id {}", scrollId); ActionFuture scrollResponse = client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(keepAlive) .execute(); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index e18c37aff783b..d4a4debbd61d6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -147,9 +147,11 @@ public void testDfsQueryThenFetch() throws Exception { ); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -184,9 +186,11 @@ public void testDfsQueryThenFetchWithSort() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i))); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -214,9 +218,11 @@ public void testQueryThenFetch() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(100 - total - i - 1))); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -227,26 +233,29 @@ public void testQueryThenFetchWithFrom() throws Exception { Set collectedIds = new TreeSet<>(); - SearchResponse searchResponse = client().search( - new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH) - ).actionGet(); - 
assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(60)); - for (int i = 0; i < 60; i++) { - SearchHit hit = searchResponse.getHits().getHits()[i]; - collectedIds.add(hit.getId()); - } - searchResponse = client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)) - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(40)); - for (int i = 0; i < 40; i++) { - SearchHit hit = searchResponse.getHits().getHits()[i]; - collectedIds.add(hit.getId()); - } - assertThat(collectedIds, equalTo(fullExpectedIds)); + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(60)); + for (int i = 0; i < 60; i++) { + SearchHit hit = searchResponse.getHits().getHits()[i]; + collectedIds.add(hit.getId()); + } + } + ); + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(40)); + for (int i = 0; i < 40; i++) { + SearchHit hit = searchResponse.getHits().getHits()[i]; + collectedIds.add(hit.getId()); + } + assertThat(collectedIds, equalTo(fullExpectedIds)); + } + ); } public void testQueryThenFetchWithSort() throws Exception { @@ -272,9 +281,11 @@ public void testQueryThenFetchWithSort() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i))); } total += 
hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -327,23 +338,27 @@ public void testFailedSearchWithWrongFrom() throws Exception { logger.info("Start Testing failed search with wrong from"); SearchSourceBuilder source = searchSource().query(termQuery("multi", "test")).from(1000).size(20).explain(true); - SearchResponse response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertThat(response.getHits().getHits().length, equalTo(0)); - assertThat(response.getTotalShards(), equalTo(test.numPrimaries)); - assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries)); - assertThat(response.getFailedShards(), equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); + assertResponse(client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), response -> { + assertThat(response.getHits().getHits().length, equalTo(0)); + assertThat(response.getTotalShards(), equalTo(test.numPrimaries)); + assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries)); + assertThat(response.getFailedShards(), equalTo(0)); + }); + + assertNoFailuresAndResponse( + client().search(new 
SearchRequest("test").searchType(QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); + + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); + + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); logger.info("Done Testing failed search"); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 379cdfc990207..d21619f4e6f89 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -372,7 +372,10 @@ public void testClusterDetailsAfterCCSWithFailuresOnRemoteClusterOnly() throws E boolean minimizeRoundtrips = TransportSearchAction.shouldMinimizeRoundtrips(searchRequest); - client(LOCAL_CLUSTER).search(searchRequest, queryFuture); + client(LOCAL_CLUSTER).search(searchRequest, queryFuture.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + })); assertBusy(() -> assertTrue(queryFuture.isDone())); // dfs=true overrides the minimize_roundtrips=true setting and does not minimize roundtrips @@ -612,7 +615,10 @@ public void testRemoteClusterOnlyCCSWithFailuresOnAllShards() throws Exception { boolean minimizeRoundtrips = TransportSearchAction.shouldMinimizeRoundtrips(searchRequest); - client(LOCAL_CLUSTER).search(searchRequest, queryFuture); + client(LOCAL_CLUSTER).search(searchRequest, queryFuture.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + })); assertBusy(() -> assertTrue(queryFuture.isDone())); if 
(skipUnavailable == false || minimizeRoundtrips == false) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 5c189c0c6c96a..ab72dbd4db707 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -378,8 +378,7 @@ public void testEnsureNoNegativeOffsets() throws Exception { assertNotHighlighted( prepareSearch().setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) - .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")) - .get(), + .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")), 0, "no_long_term" ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index c608c253c851b..c67bdf82b5c2c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -803,7 +803,7 @@ public void testFromSize() throws Exception { request.setSize(4); request.addRescorer(new QueryRescorerBuilder(matchAllQuery()), 50); - assertEquals(4, request.get().getHits().getHits().length); + assertResponse(request, response -> assertEquals(4, response.getHits().getHits().length)); } public void testRescorePhaseWithInvalidSort() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 2d77e170abdc5..2d6bb8176b091 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -447,8 +447,13 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio } public void testEquivalence() { - - final int numDocs = (int) prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value; + var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); + final int numDocs; + try { + numDocs = (int) response.getHits().getTotalHits().value; + } finally { + response.decRef(); + } int numIters = scaledRandomIntBetween(5, 10); for (int i = 0; i < numIters; i++) { { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 20b9ce38254c3..433f004acdd77 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -232,13 +232,13 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { final String customPreference = randomAlphaOfLength(10); - final String nodeId = prepareSearch("test").setQuery(matchAllQuery()) - .setPreference(customPreference) - .get() - .getHits() - .getAt(0) - .getShard() - .getNodeId(); + final String nodeId; + var response = prepareSearch("test").setQuery(matchAllQuery()).setPreference(customPreference).get(); + try { + nodeId = response.getHits().getAt(0).getShard().getNodeId(); + } finally { + response.decRef(); + } assertSearchesSpecificNode("test", customPreference, nodeId); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 1362b0166a709..816fe48e5d97f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -65,7 +65,7 @@ public void testNodeSelection() { // Now after more searches, we should select a node with the lowest ARS rank. for (int i = 0; i < 5; i++) { - client.prepareSearch().setQuery(matchAllQuery()).get(); + client.prepareSearch().setQuery(matchAllQuery()).get().decRef(); } ClusterStateResponse clusterStateResponse = client.admin().cluster().prepareState().get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index e89e51a60fa23..036467b8d0774 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -30,6 +30,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -37,56 +38,61 @@ public class DuelScrollIT extends ESIntegTestCase { public void testDuelQueryThenFetch() throws Exception { TestContext context = create(SearchType.DFS_QUERY_THEN_FETCH, SearchType.QUERY_THEN_FETCH); - SearchResponse control = prepareSearch("index").setSearchType(context.searchType) - .addSort(context.sort) - 
.setSize(context.numDocs) - .get(); - assertNoFailures(control); - SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); - assertThat(sh.getHits().length, equalTo(context.numDocs)); + assertNoFailuresAndResponse( + prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), + control -> { + SearchHits sh = control.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getHits().length, equalTo(context.numDocs)); - SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) - .addSort(context.sort) - .setSize(context.scrollRequestSize) - .setScroll("10m") - .get(); + SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) + .addSort(context.sort) + .setSize(context.scrollRequestSize) + .setScroll("10m") + .get(); + try { - assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); - assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); + assertNoFailures(searchScrollResponse); + assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); - int counter = 0; - for (SearchHit hit : searchScrollResponse.getHits()) { - assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); - } + int counter = 0; + for (SearchHit hit : searchScrollResponse.getHits()) { + assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); + } - int iter = 1; - String scrollId = searchScrollResponse.getScrollId(); - while (true) { - searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll("10m").get(); - assertNoFailures(searchScrollResponse); - 
assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); - if (searchScrollResponse.getHits().getHits().length == 0) { - break; - } + int iter = 1; + String scrollId = searchScrollResponse.getScrollId(); + while (true) { + searchScrollResponse.decRef(); + searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll("10m").get(); + assertNoFailures(searchScrollResponse); + assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + if (searchScrollResponse.getHits().getHits().length == 0) { + break; + } - int expectedLength; - int scrollSlice = ++iter * context.scrollRequestSize; - if (scrollSlice <= context.numDocs) { - expectedLength = context.scrollRequestSize; - } else { - expectedLength = context.scrollRequestSize - (scrollSlice - context.numDocs); - } - assertThat(searchScrollResponse.getHits().getHits().length, equalTo(expectedLength)); - for (SearchHit hit : searchScrollResponse.getHits()) { - assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); - } - scrollId = searchScrollResponse.getScrollId(); - } + int expectedLength; + int scrollSlice = ++iter * context.scrollRequestSize; + if (scrollSlice <= context.numDocs) { + expectedLength = context.scrollRequestSize; + } else { + expectedLength = context.scrollRequestSize - (scrollSlice - context.numDocs); + } + assertThat(searchScrollResponse.getHits().getHits().length, equalTo(expectedLength)); + for (SearchHit hit : searchScrollResponse.getHits()) { + assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); + } + scrollId = searchScrollResponse.getScrollId(); + } - assertThat(counter, equalTo(context.numDocs)); - clearScroll(scrollId); + assertThat(counter, equalTo(context.numDocs)); + clearScroll(scrollId); + } finally { + searchScrollResponse.decRef(); + } + } + ); } private TestContext create(SearchType... 
searchTypes) throws Exception { @@ -213,47 +219,51 @@ private int createIndex(boolean singleShard) throws Exception { private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int numDocs) throws Exception { final int size = scaledRandomIntBetween(5, numDocs + 5); - final SearchResponse control = prepareSearch("test").setSearchType(searchType) - .setSize(numDocs) - .setQuery(QueryBuilders.matchQuery("foo", "true")) - .addSort(SortBuilders.fieldSort("_doc")) - .setTrackScores(trackScores) - .get(); - assertNoFailures(control); + assertNoFailuresAndResponse( + prepareSearch("test").setSearchType(searchType) + .setSize(numDocs) + .setQuery(QueryBuilders.matchQuery("foo", "true")) + .addSort(SortBuilders.fieldSort("_doc")) + .setTrackScores(trackScores), + control -> { - SearchResponse scroll = prepareSearch("test").setSearchType(searchType) - .setSize(size) - .setQuery(QueryBuilders.matchQuery("foo", "true")) - .addSort(SortBuilders.fieldSort("_doc")) - .setTrackScores(trackScores) - .setScroll("10m") - .get(); + SearchResponse scroll = prepareSearch("test").setSearchType(searchType) + .setSize(size) + .setQuery(QueryBuilders.matchQuery("foo", "true")) + .addSort(SortBuilders.fieldSort("_doc")) + .setTrackScores(trackScores) + .setScroll("10m") + .get(); - int scrollDocs = 0; - try { - while (true) { - assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); - assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); - if (scroll.getHits().getHits().length == 0) { - break; + int scrollDocs = 0; + try { + while (true) { + assertNoFailures(scroll); + assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); + if (scroll.getHits().getHits().length == 0) { + break; + } + for (int i = 0; i < scroll.getHits().getHits().length; ++i) { + SearchHit 
controlHit = control.getHits().getAt(scrollDocs + i); + SearchHit scrollHit = scroll.getHits().getAt(i); + assertEquals(controlHit.getId(), scrollHit.getId()); + } + scrollDocs += scroll.getHits().getHits().length; + scroll.decRef(); + scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll("10m").get(); + } + assertEquals(control.getHits().getTotalHits().value, scrollDocs); + } catch (AssertionError e) { + logger.info("Control:\n{}", control); + logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); + throw e; + } finally { + clearScroll(scroll.getScrollId()); + scroll.decRef(); } - for (int i = 0; i < scroll.getHits().getHits().length; ++i) { - SearchHit controlHit = control.getHits().getAt(scrollDocs + i); - SearchHit scrollHit = scroll.getHits().getAt(i); - assertEquals(controlHit.getId(), scrollHit.getId()); - } - scrollDocs += scroll.getHits().getHits().length; - scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll("10m").get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); - } catch (AssertionError e) { - logger.info("Control:\n{}", control); - logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); - throw e; - } finally { - clearScroll(scroll.getScrollId()); - } + ); } public void testDuelIndexOrderQueryThenFetch() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index e8b3cfdb1768a..28723a09355a9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -47,8 +47,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -92,6 +94,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -100,6 +103,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -109,6 +113,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { } } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -146,6 +151,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } for (int i = 0; i < 32; i++) { + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -156,6 +162,7 @@ public void 
testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } // and now, the last one is one + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -165,6 +172,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } // a the last is zero + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -175,6 +183,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -196,11 +205,11 @@ public void testScrollAndUpdateIndex() throws Exception { indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 0); + 
assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 0); SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("user:kimchy")) .setSize(35) @@ -214,23 +223,19 @@ public void testScrollAndUpdateIndex() throws Exception { map.put("message", "update"); prepareIndex("test").setId(searchHit.getId()).setSource(map).get(); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); } while (searchResponse.getHits().getHits().length > 0); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat( - prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(500L) - ); - assertThat( - prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(500L) - ); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 0); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 0); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 500); } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -246,12 +251,24 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); + long counter1 = 0; + long counter2 = 0; + 
SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); + } SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) @@ -259,36 +276,36 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - - long counter1 = 0; - long counter2 = 0; - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); - } - - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - searchResponse2 = 
client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); } - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } ClearScrollResponse clearResponse = client().prepareClearScroll() @@ -361,12 +378,24 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); + long counter1 = 0; + long counter2 = 0; + SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); + try { + 
assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); + } SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) @@ -374,36 +403,36 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - - long counter1 = 0; - long counter2 = 0; - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); - } - - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : 
searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); } - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } ClearScrollResponse clearResponse = client().prepareClearScroll().addScrollId("_all").get(); @@ -447,6 +476,7 @@ public void testDeepScrollingDoesNotBlowUp() throws Exception { if (scrollId != null) { clearScroll(scrollId); } + response.decRef(); } } } @@ -456,12 +486,16 @@ public void testThatNonExistingScrollIdReturnsCorrectException() throws Exceptio refresh(); SearchResponse searchResponse = prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + try { + assertThat(searchResponse.getScrollId(), is(notNullValue())); - ClearScrollResponse clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); - assertThat(clearScrollResponse.isSucceeded(), is(true)); + ClearScrollResponse 
clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); + assertThat(clearScrollResponse.isSucceeded(), is(true)); - assertRequestBuilderThrows(internalCluster().client().prepareSearchScroll(searchResponse.getScrollId()), RestStatus.NOT_FOUND); + assertRequestBuilderThrows(internalCluster().client().prepareSearchScroll(searchResponse.getScrollId()), RestStatus.NOT_FOUND); + } finally { + searchResponse.decRef(); + } } public void testStringSortMissingAscTerminates() throws Exception { @@ -471,30 +505,29 @@ public void testStringSortMissingAscTerminates() throws Exception { prepareIndex("test").setId("1").setSource("some_field", "test").get(); refresh(); - SearchResponse response = prepareSearch("test") - - .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")) - .setScroll("1m") - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); - - response = client().prepareSearchScroll(response.getScrollId()).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertNoSearchHits(response); - - response = prepareSearch("test") - - .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")) - .setScroll("1m") - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); + assertResponse( + prepareSearch("test").addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")).setScroll("1m"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertNoFailuresAndResponse(client().prepareSearchScroll(response.getScrollId()), response2 -> { + assertHitCount(response2, 1); + assertNoSearchHits(response2); + }); + } + ); - response = client().prepareSearchScroll(response.getScrollId()).get(); - assertHitCount(response, 1); - assertThat(response.getHits().getHits().length, equalTo(0)); + assertResponse( + prepareSearch("test").addSort(new 
FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")).setScroll("1m"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertResponse(client().prepareSearchScroll(response.getScrollId()), response2 -> { + assertHitCount(response2, 1); + assertThat(response2.getHits().getHits().length, equalTo(0)); + }); + } + ); } public void testCloseAndReopenOrDeleteWithActiveScroll() { @@ -503,17 +536,17 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).get(); } refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .setSize(35) - .setScroll(TimeValue.timeValueMinutes(2)) - .addSort("field", SortOrder.ASC) - .get(); - long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); - } + assertResponse( + prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), + searchResponse -> { + long counter = 0; + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); + } + } + ); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose("test")); assertAcked(indicesAdmin().prepareOpen("test")); @@ -572,18 +605,18 @@ public void testInvalidScrollKeepAlive() throws IOException { assertNotNull(illegalArgumentException); assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (2h) is too large")); - SearchResponse searchResponse = 
prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)).get(); - assertNotNull(searchResponse.getScrollId()); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - - exc = expectThrows( - Exception.class, - () -> client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueHours(3)).get() - ); - illegalArgumentException = (IllegalArgumentException) ExceptionsHelper.unwrap(exc, IllegalArgumentException.class); - assertNotNull(illegalArgumentException); - assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (3h) is too large")); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { + assertNotNull(searchResponse.getScrollId()); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + Exception ex = expectThrows( + Exception.class, + () -> client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueHours(3)).get() + ); + IllegalArgumentException iae = (IllegalArgumentException) ExceptionsHelper.unwrap(ex, IllegalArgumentException.class); + assertNotNull(iae); + assertThat(iae.getMessage(), containsString("Keep alive for request (3h) is too large")); + }); } /** @@ -614,13 +647,18 @@ public void testScrollRewrittenToMatchNoDocs() { assertNoFailures(resp); while (resp.getHits().getHits().length > 0) { totalHits += resp.getHits().getHits().length; - resp = client().prepareSearchScroll(resp.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + final String scrollId = resp.getScrollId(); + resp.decRef(); + resp = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(1)).get(); assertNoFailures(resp); } assertThat(totalHits, equalTo(2)); } finally { - if (resp != 
null && resp.getScrollId() != null) { - client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + if (resp != null) { + if (resp.getScrollId() != null) { + client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + } + resp.decRef(); } } } @@ -635,26 +673,38 @@ public void testRestartDataNodesDuringScrollSearch() throws Exception { index("prod", "prod-" + i, Map.of()); } indicesAdmin().prepareRefresh().get(); + final String respFromDemoIndexScrollId; SearchResponse respFromDemoIndex = prepareSearch("demo").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); + try { + respFromDemoIndexScrollId = respFromDemoIndex.getScrollId(); + } finally { + respFromDemoIndex.decRef(); + } internalCluster().restartNode(dataNode, new InternalTestCluster.RestartCallback()); ensureGreen("demo", "prod"); + final String respFromProdIndexScrollId; SearchResponse respFromProdIndex = prepareSearch("prod").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); - assertNoFailures(respFromProdIndex); + try { + assertNoFailures(respFromProdIndex); + respFromProdIndexScrollId = respFromProdIndex.getScrollId(); + } finally { + respFromProdIndex.decRef(); + } SearchPhaseExecutionException error = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearchScroll(respFromDemoIndex.getScrollId()).get() + () -> client().prepareSearchScroll(respFromDemoIndexScrollId).get() ); for (ShardSearchFailure shardSearchFailure : error.shardFailures()) { assertThat(shardSearchFailure.getCause().getMessage(), containsString("No search context found for id [1]")); } - client().prepareSearchScroll(respFromProdIndex.getScrollId()).get(); + client().prepareSearchScroll(respFromProdIndexScrollId).get().decRef(); } private void assertToXContentResponse(ClearScrollResponse response, boolean succeed, int numFreed) throws IOException { diff 
--git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java index 23a38c0608490..42be70e5ff8b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -62,30 +62,37 @@ public void testScanScrollWithShardExceptions() throws Exception { .setSize(10) .setScroll(TimeValue.timeValueMinutes(1)) .get(); - assertAllSuccessful(searchResponse); - long numHits = 0; - do { - numHits += searchResponse.getHits().getHits().length; - searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + try { assertAllSuccessful(searchResponse); - } while (searchResponse.getHits().getHits().length > 0); - assertThat(numHits, equalTo(100L)); - clearScroll("_all"); + long numHits = 0; + do { + numHits += searchResponse.getHits().getHits().length; + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertAllSuccessful(searchResponse); + } while (searchResponse.getHits().getHits().length > 0); + assertThat(numHits, equalTo(100L)); + clearScroll("_all"); - internalCluster().stopRandomNonMasterNode(); + internalCluster().stopRandomNonMasterNode(); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); - assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards())); - numHits = 0; - int numberOfSuccessfulShards = searchResponse.getSuccessfulShards(); - do { - numHits += searchResponse.getHits().getHits().length; - searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); - assertThat(searchResponse.getSuccessfulShards(), equalTo(numberOfSuccessfulShards)); - } while (searchResponse.getHits().getHits().length > 0); - assertThat(numHits, greaterThan(0L)); + searchResponse.decRef(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards())); + numHits = 0; + int numberOfSuccessfulShards = searchResponse.getSuccessfulShards(); + do { + numHits += searchResponse.getHits().getHits().length; + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertThat(searchResponse.getSuccessfulShards(), equalTo(numberOfSuccessfulShards)); + } while (searchResponse.getHits().getHits().length > 0); + assertThat(numHits, greaterThan(0L)); - clearScroll(searchResponse.getScrollId()); + clearScroll(searchResponse.getScrollId()); + } finally { + searchResponse.decRef(); + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 6219c1b72253a..d76031d402af0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -50,6 +50,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayWithSize; @@ -157,15 +159,18 @@ public void testWithNullStrings() throws InterruptedException { prepareIndex("test").setId("0").setSource("field1", 0), prepareIndex("test").setId("1").setSource("field1", 100, "field2", "toto") ); - SearchResponse searchResponse = prepareSearch("test").addSort("field1", SortOrder.ASC) - .addSort("field2", SortOrder.ASC) - .setQuery(matchAllQuery()) - .searchAfter(new Object[] { 0, null }) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + assertResponse( + prepareSearch("test").addSort("field1", SortOrder.ASC) + .addSort("field2", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[] { 0, null }), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); + assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); + assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + } + ); } public void testWithSimpleTypes() throws Exception { @@ -229,31 +234,36 @@ public void testWithCustomFormatSortValueOfDateField() throws Exception { .add(new IndexRequest("test").id("5").source("start_date", "2017-01-20", "end_date", "2025-05-28")) .get(); - SearchResponse resp = 
prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("22/01/2015", "2022-07-23")); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("21/02/2016", "2024-03-24")); - - resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) - .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", "2025-05-28")); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", "2021-02-22")); + assertNoFailuresAndResponse( + prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("22/01/2015", "2022-07-23")); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("21/02/2016", "2024-03-24")); + } + ); - resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date")) // it's okay because end_date has the format "yyyy-MM-dd" - .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", 1748390400000L)); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", 1613952000000L)); + assertNoFailuresAndResponse( + 
prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) + .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", "2025-05-28")); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", "2021-02-22")); + } + ); + assertNoFailuresAndResponse( + prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date")) // it's okay because end_date has the format "yyyy-MM-dd" + .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", 1748390400000L)); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", 1613952000000L)); + } + ); SearchRequestBuilder searchRequest = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date").setFormat("epoch_millis")) @@ -332,11 +342,15 @@ private void assertSearchFromWithSortValues(String indexName, List> req.searchAfter(sortValues); } SearchResponse searchResponse = req.get(); - for (SearchHit hit : searchResponse.getHits()) { - List toCompare = convertSortValues(documents.get(offset++)); - assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.getSortValues())), equalTo(0)); + try { + for (SearchHit hit : searchResponse.getHits()) { + List toCompare = convertSortValues(documents.get(offset++)); + assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.getSortValues())), equalTo(0)); + } + sortValues = searchResponse.getHits().getHits()[searchResponse.getHits().getHits().length - 1].getSortValues(); + } finally { + searchResponse.decRef(); } - sortValues = 
searchResponse.getHits().getHits()[searchResponse.getHits().getHits().length - 1].getSortValues(); } } @@ -445,11 +459,13 @@ public void testScrollAndSearchAfterWithBigIndex() { assertThat(((Number) timestamp).longValue(), equalTo(timestamps.get(foundHits))); foundHits++; } + resp.decRef(); resp = client().prepareSearchScroll(resp.getScrollId()).setScroll(TimeValue.timeValueMinutes(5)).get(); } while (resp.getHits().getHits().length > 0); assertThat(foundHits, equalTo(timestamps.size())); } finally { client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + resp.decRef(); } } // search_after with sort with point in time @@ -479,11 +495,13 @@ public void testScrollAndSearchAfterWithBigIndex() { assertNotNull(after); assertThat("Sorted by timestamp and pit tier breaker", after, arrayWithSize(2)); searchRequest.source().searchAfter(after); + resp.decRef(); resp = client().search(searchRequest).actionGet(); } while (resp.getHits().getHits().length > 0); assertThat(foundHits, equalTo(timestamps.size())); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitID)).actionGet(); + resp.decRef(); } } @@ -512,12 +530,14 @@ public void testScrollAndSearchAfterWithBigIndex() { assertNotNull(after); assertThat("sorted by pit tie breaker", after, arrayWithSize(1)); searchRequest.source().searchAfter(after); + resp.decRef(); resp = client().search(searchRequest).actionGet(); } while (resp.getHits().getHits().length > 0); Collections.sort(foundSeqNos); assertThat(foundSeqNos, equalTo(timestamps)); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitID)).actionGet(); + resp.decRef(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 527d8bed8bc68..93340bedbdae3 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -39,6 +39,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -111,8 +112,8 @@ public void testWithPreferenceAndRoutings() throws Exception { int numShards = 10; int totalDocs = randomIntBetween(100, 1000); setupIndex(totalDocs, numShards); - { - SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0).get(); + + assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -122,9 +123,9 @@ public void testWithPreferenceAndRoutings() throws Exception { .setPreference("_shards:1,4") .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } - { - SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0).get(); + }); + + assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -134,15 +135,15 @@ public void testWithPreferenceAndRoutings() throws Exception { .setRouting("foo", "bar") .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } - { - 
assertAcked( - indicesAdmin().prepareAliases() - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) - ); - SearchResponse sr = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0).get(); + }); + + assertAcked( + indicesAdmin().prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) + ); + assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -151,7 +152,7 @@ public void testWithPreferenceAndRoutings() throws Exception { .setSize(fetchSize) .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } + }); } private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String field, int numSlice, int numDocs) { @@ -160,27 +161,32 @@ private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String f for (int id = 0; id < numSlice; id++) { SliceBuilder sliceBuilder = new SliceBuilder(field, id, numSlice); SearchResponse searchResponse = request.slice(sliceBuilder).get(); - totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; - int numSliceResults = searchResponse.getHits().getHits().length; - String scrollId = searchResponse.getScrollId(); - for (SearchHit hit : 
searchResponse.getHits().getHits()) { - assertTrue(keys.add(hit.getId())); - } - while (searchResponse.getHits().getHits().length > 0) { - searchResponse = client().prepareSearchScroll("test") - .setScrollId(scrollId) - .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) - .get(); - scrollId = searchResponse.getScrollId(); + try { totalResults += searchResponse.getHits().getHits().length; - numSliceResults += searchResponse.getHits().getHits().length; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int numSliceResults = searchResponse.getHits().getHits().length; + String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { assertTrue(keys.add(hit.getId())); } + while (searchResponse.getHits().getHits().length > 0) { + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll("test") + .setScrollId(scrollId) + .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) + .get(); + scrollId = searchResponse.getScrollId(); + totalResults += searchResponse.getHits().getHits().length; + numSliceResults += searchResponse.getHits().getHits().length; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + clearScroll(scrollId); + } finally { + searchResponse.decRef(); } - assertThat(numSliceResults, equalTo(expectedSliceResults)); - clearScroll(scrollId); } assertThat(totalResults, equalTo(numDocs)); assertThat(keys.size(), equalTo(numDocs)); @@ -222,24 +228,29 @@ private void assertSearchSlicesWithPointInTime(String sliceField, String sortFie .setSize(randomIntBetween(10, 100)); SearchResponse searchResponse = request.get(); - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + try { + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; - while (true) { - int numHits = 
searchResponse.getHits().getHits().length; - if (numHits == 0) { - break; - } + while (true) { + int numHits = searchResponse.getHits().getHits().length; + if (numHits == 0) { + break; + } - totalResults += numHits; - numSliceResults += numHits; - for (SearchHit hit : searchResponse.getHits().getHits()) { - assertTrue(keys.add(hit.getId())); - } + totalResults += numHits; + numSliceResults += numHits; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } - Object[] sortValues = searchResponse.getHits().getHits()[numHits - 1].getSortValues(); - searchResponse = request.searchAfter(sortValues).get(); + Object[] sortValues = searchResponse.getHits().getHits()[numHits - 1].getSortValues(); + searchResponse.decRef(); + searchResponse = request.searchAfter(sortValues).get(); + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + } finally { + searchResponse.decRef(); } - assertThat(numSliceResults, equalTo(expectedSliceResults)); } assertThat(totalResults, equalTo(numDocs)); assertThat(keys.size(), equalTo(numDocs)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 2967bdc454aed..4a10bf6cf8fab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -86,8 +86,8 @@ public void testWithRouting() { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - - response = prepareSearch("test").storedFields("_none_").get(); + }); + assertResponse(prepareSearch("test").storedFields("_none_"), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); 
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java index 0d850a3708044..160cba19700ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java @@ -119,7 +119,8 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio .addAggregation(AggregationBuilders.terms("agg1").field("field.keyword")) .setSize(0) .setPreference("fixed") - .get(); + .get() + .decRef(); stats = aggregated(client().execute(FieldUsageStatsAction.INSTANCE, new FieldUsageStatsRequest()).get().getStats().get("test")); logger.info("Stats after second query: {}", stats); @@ -148,7 +149,8 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio .setQuery(QueryBuilders.rangeQuery("date_field").from("2016/01/01")) .setSize(100) .setPreference("fixed") - .get(); + .get() + .decRef(); stats = aggregated(client().execute(FieldUsageStatsAction.INSTANCE, new FieldUsageStatsRequest()).get().getStats().get("test")); logger.info("Stats after third query: {}", stats); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index b5f7468d1645c..9ca565cef7843 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -58,6 +58,7 @@ import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; @@ -945,10 +946,11 @@ public void testThatStatsAreWorking() throws Exception { ensureGreen(); // load the fst index into ram prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f"))) - .get(); + .get() + .decRef(); prepareSearch(INDEX).suggest( new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f")) - ).get(); + ).get().decRef(); // Get all stats IndicesStatsResponse indicesStatsResponse = indicesAdmin().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get(); @@ -1278,7 +1280,7 @@ public void testPrunedSegments() throws IOException { refresh(); assertSuggestions("b"); - assertThat(2L, equalTo(prepareSearch(INDEX).setSize(0).get().getHits().getTotalHits().value)); + assertHitCount(prepareSearch(INDEX).setSize(0), 2); for (IndexShardSegments seg : indicesAdmin().prepareSegments().get().getIndices().get(INDEX)) { ShardSegments[] shards = seg.shards(); for (ShardSegments shardSegments : shards) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 42c19a903b452..b04aa321f70f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -55,6 +55,7 @@ import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -953,7 +954,7 @@ public void testQueuedSnapshotsWaitingForShardReady() throws Exception { indexDoc(testIndex, Integer.toString(i), "foo", "bar" + i); } refresh(); - assertThat(prepareSearch(testIndex).setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch(testIndex).setSize(0), 100); logger.info("--> start relocations"); allowNodes(testIndex, 1); diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 570d583335a12..0b5b953df84fc 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -502,9 +502,14 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce } protected long getCountForIndex(String indexName) { - return client().search( + var resp = client().search( new SearchRequest(new SearchRequest(indexName).source(new SearchSourceBuilder().size(0).trackTotalHits(true))) - ).actionGet().getHits().getTotalHits().value; + ).actionGet(); + try { + return resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } } protected void assertDocCount(String index, long count) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 2f6286092b535..e0083d5570baa 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1064,18 +1064,17 @@ public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) thr if (lastKnownCount >= numDocs) { try { - long count = prepareSearch().setTrackTotalHits(true) - .setSize(0) - .setQuery(matchAllQuery()) - .get() - .getHits() - .getTotalHits().value; - - if (count == lastKnownCount) { - // no progress - try to refresh for the next time - indicesAdmin().prepareRefresh().get(); + var resp = prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()).get(); + try { + long count = resp.getHits().getTotalHits().value; + if (count == lastKnownCount) { + // no progress - try to refresh for the next time + indicesAdmin().prepareRefresh().get(); + } + lastKnownCount = count; + } finally { + resp.decRef(); } - lastKnownCount = count; } catch (Exception e) { // count now acts like search and barfs if all shards failed... logger.debug("failed to executed count", e); throw e; From f99b4459d7dd3b46cb6e75116a054669f18d4404 Mon Sep 17 00:00:00 2001 From: Riahiamirreza <54557628+Riahiamirreza@users.noreply.github.com> Date: Tue, 5 Dec 2023 00:14:12 +0330 Subject: [PATCH 132/181] Remove redundant character in mlt-query.asciidoc (#102945) --- docs/reference/query-dsl/mlt-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index 8550a34efaa4d..c470beaa9e41d 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -107,7 +107,7 @@ analyzes it, usually using the same analyzer at the field, then selects the top K terms with highest tf-idf to form a disjunctive query of these terms. IMPORTANT: The fields on which to perform MLT must be indexed and of type -`text` or `keyword``. 
Additionally, when using `like` with documents, either +`text` or `keyword`. Additionally, when using `like` with documents, either `_source` must be enabled or the fields must be `stored` or store `term_vector`. In order to speed up analysis, it could help to store term vectors at index time. From eee5f98550ae4c815e1b3549b11df8bab2a861fa Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 4 Dec 2023 21:48:51 +0100 Subject: [PATCH 133/181] [Connector API] Implement configuration internal representation + update endpoint (#102927) --- .../api/connector.update_configuration.json | 39 ++ .../335_connector_update_configuration.yml | 183 ++++++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 32 +- .../connector/ConnectorConfiguration.java | 442 ++++++++++++++++++ .../connector/ConnectorIndexService.java | 39 ++ ...estUpdateConnectorConfigurationAction.java | 45 ++ ...ortUpdateConnectorConfigurationAction.java | 55 +++ .../UpdateConnectorConfigurationAction.java | 202 ++++++++ .../ConfigurationDependency.java | 129 +++++ .../ConfigurationDisplayType.java | 32 ++ .../configuration/ConfigurationFieldType.java | 35 ++ .../ConfigurationSelectOption.java | 106 +++++ .../ConfigurationValidation.java | 131 ++++++ .../ConfigurationValidationType.java | 33 ++ .../connector/syncjob/ConnectorSyncJob.java | 3 +- .../syncjob/ConnectorSyncJobIndexService.java | 5 +- .../ConnectorConfigurationTests.java | 97 ++++ .../connector/ConnectorIndexServiceTests.java | 51 ++ .../connector/ConnectorTestUtils.java | 65 ++- .../application/connector/ConnectorTests.java | 276 ++++++----- ...ationActionRequestBWCSerializingTests.java | 52 +++ ...tionActionResponseBWCSerializingTests.java | 43 ++ .../xpack/security/operator/Constants.java | 1 + 24 files changed, 1957 insertions(+), 144 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json create mode 100644 
x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java create mode 100644 
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json new file mode 100644 index 0000000000000..347418940b4c9 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json @@ -0,0 +1,39 @@ +{ + "connector.update_configuration": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the connector configuration." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_configuration", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "Mapping between field names to configuration.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml new file mode 100644 index 0000000000000..260e1784d29e2 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -0,0 +1,183 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Configuration": + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.some_field.value: 123 } + - match: { configuration.some_field.sensitive: false } + - match: { configuration.some_field.display: numeric } + - match: { status: configured } + + + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. 
+ type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 456 + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.some_field.value: 456 } + - match: { status: configured } + +--- +"Update Connector Configuration - Connector doesn't exist": + - do: + catch: "missing" + connector.update_configuration: + connector_id: test-non-existent-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + +--- +"Update Connector Configuration - Required fields are missing": + - do: + catch: "bad_request" + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + +--- +"Update Connector Configuration - Unknown field type": + - do: + catch: "bad_request" + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: some_display_type + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. 
+ type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + +--- +"Update Connector Configuration - Unknown constraint": + - do: + catch: "bad_request" + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: unknown_constraint + value: 123 diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index f93177666f3d8..1a8ae73c41935 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; @@ -60,12 +61,14 @@ import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import 
org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; @@ -207,6 +210,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), + new ActionHandler<>(UpdateConnectorConfigurationAction.INSTANCE, TransportUpdateConnectorConfigurationAction.class), new ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), @@ -275,6 +279,7 @@ public List getRestHandlers( new 
RestGetConnectorAction(), new RestListConnectorAction(), new RestPutConnectorAction(), + new RestUpdateConnectorConfigurationAction(), new RestUpdateConnectorErrorAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorLastSeenAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index d68cc9f7227bc..73d066f64d197 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -70,7 +70,7 @@ public class Connector implements NamedWriteable, ToXContentObject { @Nullable private final String apiKeyId; @Nullable - private final Map configuration; // TODO: add explicit types + private final Map configuration; @Nullable private final Map customScheduling; @Nullable @@ -131,7 +131,7 @@ public class Connector implements NamedWriteable, ToXContentObject { private Connector( String connectorId, String apiKeyId, - Map configuration, + Map configuration, Map customScheduling, String description, String error, @@ -175,7 +175,7 @@ private Connector( public Connector(StreamInput in) throws IOException { this.connectorId = in.readString(); this.apiKeyId = in.readOptionalString(); - this.configuration = in.readMap(StreamInput::readGenericValue); + this.configuration = in.readMap(ConnectorConfiguration::new); this.customScheduling = in.readMap(ConnectorCustomSchedule::new); this.description = in.readOptionalString(); this.error = in.readOptionalString(); @@ -220,7 +220,7 @@ public Connector(StreamInput in) throws IOException { int i = 0; return new Builder().setConnectorId((String) args[i++]) .setApiKeyId((String) args[i++]) - .setConfiguration((Map) args[i++]) + .setConfiguration((Map) args[i++]) .setCustomScheduling((Map) args[i++]) 
.setDescription((String) args[i++]) .setError((String) args[i++]) @@ -258,7 +258,7 @@ public Connector(StreamInput in) throws IOException { PARSER.declareString(optionalConstructorArg(), API_KEY_ID_FIELD); PARSER.declareField( optionalConstructorArg(), - (parser, context) -> parser.map(), + (p, c) -> p.map(HashMap::new, ConnectorConfiguration::fromXContent), CONFIGURATION_FIELD, ObjectParser.ValueType.OBJECT ); @@ -378,10 +378,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(API_KEY_ID_FIELD.getPreferredName(), apiKeyId); } if (configuration != null) { - builder.field(CONFIGURATION_FIELD.getPreferredName(), configuration); + builder.xContentValuesMap(CONFIGURATION_FIELD.getPreferredName(), configuration); } if (customScheduling != null) { - builder.field(CUSTOM_SCHEDULING_FIELD.getPreferredName(), customScheduling); + builder.xContentValuesMap(CUSTOM_SCHEDULING_FIELD.getPreferredName(), customScheduling); } if (description != null) { builder.field(DESCRIPTION_FIELD.getPreferredName(), description); @@ -433,7 +433,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void writeTo(StreamOutput out) throws IOException { out.writeString(connectorId); out.writeOptionalString(apiKeyId); - out.writeMap(configuration, StreamOutput::writeGenericValue); + out.writeMap(configuration, StreamOutput::writeWriteable); out.writeMap(customScheduling, StreamOutput::writeWriteable); out.writeOptionalString(description); out.writeOptionalString(error); @@ -461,10 +461,6 @@ public String getApiKeyId() { return apiKeyId; } - public Map getConfiguration() { - return configuration; - } - public Map getCustomScheduling() { return customScheduling; } @@ -513,8 +509,8 @@ public String getServiceType() { return serviceType; } - public ConnectorStatus getStatus() { - return status; + public Map getConfiguration() { + return configuration; } public Object getSyncCursor() { @@ -533,6 +529,10 @@ public 
Instant getLastSeen() { return lastSeen; } + public ConnectorStatus getStatus() { + return status; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -595,7 +595,7 @@ public static class Builder { private String connectorId; private String apiKeyId; - private Map configuration = Collections.emptyMap(); + private Map configuration = Collections.emptyMap(); private Map customScheduling = Collections.emptyMap(); private String description; private String error; @@ -625,7 +625,7 @@ public Builder setApiKeyId(String apiKeyId) { return this; } - public Builder setConfiguration(Map configuration) { + public Builder setConfiguration(Map configuration) { this.configuration = configuration; return this; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java new file mode 100644 index 0000000000000..103c647f180b4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java @@ -0,0 +1,442 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDependency; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDisplayType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationFieldType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationSelectOption; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationValidation; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Represents the configuration field settings for a connector. 
+ */ +public class ConnectorConfiguration implements Writeable, ToXContentObject { + + @Nullable + private final String category; + private final Object defaultValue; + private final List dependsOn; + private final ConfigurationDisplayType display; + private final String label; + private final List options; + @Nullable + private final Integer order; + @Nullable + private final String placeholder; + private final boolean required; + private final boolean sensitive; + private final String tooltip; + private final ConfigurationFieldType type; + private final List uiRestrictions; + private final List validations; + private final Object value; + + /** + * Constructs a new {@link ConnectorConfiguration} instance with specified properties. + * + * @param category The category of the configuration field. + * @param defaultValue The default value for the configuration. + * @param dependsOn A list of {@link ConfigurationDependency} indicating dependencies on other configurations. + * @param display The display type, defined by {@link ConfigurationDisplayType}. + * @param label The display label associated with the config field. + * @param options A list of {@link ConfigurationSelectOption} for selectable options. + * @param order The order in which this configuration appears. + * @param placeholder A placeholder text for the configuration field. + * @param required A boolean indicating whether the configuration is required. + * @param sensitive A boolean indicating whether the configuration contains sensitive information. + * @param tooltip A tooltip text providing additional information about the configuration. + * @param type The type of the configuration field, defined by {@link ConfigurationFieldType}. + * @param uiRestrictions A list of UI restrictions in string format. + * @param validations A list of {@link ConfigurationValidation} for validating the configuration. + * @param value The current value of the configuration. 
+ */ + private ConnectorConfiguration( + String category, + Object defaultValue, + List dependsOn, + ConfigurationDisplayType display, + String label, + List options, + Integer order, + String placeholder, + boolean required, + boolean sensitive, + String tooltip, + ConfigurationFieldType type, + List uiRestrictions, + List validations, + Object value + ) { + this.category = category; + this.defaultValue = defaultValue; + this.dependsOn = dependsOn; + this.display = display; + this.label = label; + this.options = options; + this.order = order; + this.placeholder = placeholder; + this.required = required; + this.sensitive = sensitive; + this.tooltip = tooltip; + this.type = type; + this.uiRestrictions = uiRestrictions; + this.validations = validations; + this.value = value; + } + + public ConnectorConfiguration(StreamInput in) throws IOException { + this.category = in.readString(); + this.defaultValue = in.readGenericValue(); + this.dependsOn = in.readOptionalCollectionAsList(ConfigurationDependency::new); + this.display = in.readEnum(ConfigurationDisplayType.class); + this.label = in.readString(); + this.options = in.readOptionalCollectionAsList(ConfigurationSelectOption::new); + this.order = in.readOptionalInt(); + this.placeholder = in.readOptionalString(); + this.required = in.readBoolean(); + this.sensitive = in.readBoolean(); + this.tooltip = in.readOptionalString(); + this.type = in.readEnum(ConfigurationFieldType.class); + this.uiRestrictions = in.readOptionalStringCollectionAsList(); + this.validations = in.readOptionalCollectionAsList(ConfigurationValidation::new); + this.value = in.readGenericValue(); + } + + static final ParseField CATEGORY_FIELD = new ParseField("category"); + static final ParseField DEFAULT_VALUE_FIELD = new ParseField("default_value"); + static final ParseField DEPENDS_ON_FIELD = new ParseField("depends_on"); + static final ParseField DISPLAY_FIELD = new ParseField("display"); + static final ParseField LABEL_FIELD = new 
ParseField("label"); + static final ParseField OPTIONS_FIELD = new ParseField("options"); + static final ParseField ORDER_FIELD = new ParseField("order"); + static final ParseField PLACEHOLDER_FIELD = new ParseField("placeholder"); + static final ParseField REQUIRED_FIELD = new ParseField("required"); + static final ParseField SENSITIVE_FIELD = new ParseField("sensitive"); + static final ParseField TOOLTIP_FIELD = new ParseField("tooltip"); + static final ParseField TYPE_FIELD = new ParseField("type"); + static final ParseField UI_RESTRICTIONS_FIELD = new ParseField("ui_restrictions"); + static final ParseField VALIDATIONS_FIELD = new ParseField("validations"); + static final ParseField VALUE_FIELD = new ParseField("value"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_dependency", + true, + args -> { + int i = 0; + return new ConnectorConfiguration.Builder().setCategory((String) args[i++]) + .setDefaultValue(args[i++]) + .setDependsOn((List) args[i++]) + .setDisplay((ConfigurationDisplayType) args[i++]) + .setLabel((String) args[i++]) + .setOptions((List) args[i++]) + .setOrder((Integer) args[i++]) + .setPlaceholder((String) args[i++]) + .setRequired((boolean) args[i++]) + .setSensitive((boolean) args[i++]) + .setTooltip((String) args[i++]) + .setType((ConfigurationFieldType) args[i++]) + .setUiRestrictions((List) args[i++]) + .setValidations((List) args[i++]) + .setValue(args[i]) + .build(); + } + ); + + static { + PARSER.declareString(optionalConstructorArg(), CATEGORY_FIELD); + PARSER.declareField(optionalConstructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + return p.booleanValue(); + } else if (p.currentToken() == 
XContentParser.Token.VALUE_NULL) { + return null; + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, DEFAULT_VALUE_FIELD, ObjectParser.ValueType.VALUE); + PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationDependency.fromXContent(p), DEPENDS_ON_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConfigurationDisplayType.displayType(p.text()), + DISPLAY_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareString(constructorArg(), LABEL_FIELD); + PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationSelectOption.fromXContent(p), OPTIONS_FIELD); + PARSER.declareInt(optionalConstructorArg(), ORDER_FIELD); + PARSER.declareString(optionalConstructorArg(), PLACEHOLDER_FIELD); + PARSER.declareBoolean(constructorArg(), REQUIRED_FIELD); + PARSER.declareBoolean(constructorArg(), SENSITIVE_FIELD); + PARSER.declareStringOrNull(constructorArg(), TOOLTIP_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConfigurationFieldType.fieldType(p.text()), + TYPE_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareStringArray(constructorArg(), UI_RESTRICTIONS_FIELD); + PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationValidation.fromXContent(p), VALIDATIONS_FIELD); + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + return p.booleanValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, VALUE_FIELD, ObjectParser.ValueType.VALUE); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (category != 
null) { + builder.field(CATEGORY_FIELD.getPreferredName(), category); + } + builder.field(DEFAULT_VALUE_FIELD.getPreferredName(), defaultValue); + builder.xContentList(DEPENDS_ON_FIELD.getPreferredName(), dependsOn); + builder.field(DISPLAY_FIELD.getPreferredName(), display.toString()); + builder.field(LABEL_FIELD.getPreferredName(), label); + builder.xContentList(OPTIONS_FIELD.getPreferredName(), options); + if (order != null) { + builder.field(ORDER_FIELD.getPreferredName(), order); + } + if (placeholder != null) { + builder.field(PLACEHOLDER_FIELD.getPreferredName(), placeholder); + } + builder.field(REQUIRED_FIELD.getPreferredName(), required); + builder.field(SENSITIVE_FIELD.getPreferredName(), sensitive); + builder.field(TOOLTIP_FIELD.getPreferredName(), tooltip); + builder.field(TYPE_FIELD.getPreferredName(), type.toString()); + builder.stringListField(UI_RESTRICTIONS_FIELD.getPreferredName(), uiRestrictions); + builder.xContentList(VALIDATIONS_FIELD.getPreferredName(), validations); + builder.field(VALUE_FIELD.getPreferredName(), value); + } + builder.endObject(); + return builder; + } + + public static ConnectorConfiguration fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public static ConnectorConfiguration fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorConfiguration.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector configuration field.", e); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(category); + out.writeGenericValue(defaultValue); + out.writeOptionalCollection(dependsOn); + out.writeEnum(display); + out.writeString(label); + out.writeOptionalCollection(options); + out.writeOptionalInt(order); + 
out.writeOptionalString(placeholder); + out.writeBoolean(required); + out.writeBoolean(sensitive); + out.writeOptionalString(tooltip); + out.writeEnum(type); + out.writeOptionalStringCollection(uiRestrictions); + out.writeOptionalCollection(validations); + out.writeGenericValue(value); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConnectorConfiguration that = (ConnectorConfiguration) o; + return required == that.required + && sensitive == that.sensitive + && Objects.equals(category, that.category) + && Objects.equals(defaultValue, that.defaultValue) + && Objects.equals(dependsOn, that.dependsOn) + && display == that.display + && Objects.equals(label, that.label) + && Objects.equals(options, that.options) + && Objects.equals(order, that.order) + && Objects.equals(placeholder, that.placeholder) + && Objects.equals(tooltip, that.tooltip) + && type == that.type + && Objects.equals(uiRestrictions, that.uiRestrictions) + && Objects.equals(validations, that.validations) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash( + category, + defaultValue, + dependsOn, + display, + label, + options, + order, + placeholder, + required, + sensitive, + tooltip, + type, + uiRestrictions, + validations, + value + ); + } + + public static class Builder { + + private String category; + private Object defaultValue; + private List dependsOn; + private ConfigurationDisplayType display; + private String label; + private List options; + private Integer order; + private String placeholder; + private boolean required; + private boolean sensitive; + private String tooltip; + private ConfigurationFieldType type; + private List uiRestrictions; + private List validations; + private Object value; + + public Builder setCategory(String category) { + this.category = category; + return this; + } + + public Builder setDefaultValue(Object 
defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public Builder setDependsOn(List dependsOn) { + this.dependsOn = dependsOn; + return this; + } + + public Builder setDisplay(ConfigurationDisplayType display) { + this.display = display; + return this; + } + + public Builder setLabel(String label) { + this.label = label; + return this; + } + + public Builder setOptions(List options) { + this.options = options; + return this; + } + + public Builder setOrder(Integer order) { + this.order = order; + return this; + } + + public Builder setPlaceholder(String placeholder) { + this.placeholder = placeholder; + return this; + } + + public Builder setRequired(boolean required) { + this.required = required; + return this; + } + + public Builder setSensitive(boolean sensitive) { + this.sensitive = sensitive; + return this; + } + + public Builder setTooltip(String tooltip) { + this.tooltip = tooltip; + return this; + } + + public Builder setType(ConfigurationFieldType type) { + this.type = type; + return this; + } + + public Builder setUiRestrictions(List uiRestrictions) { + this.uiRestrictions = uiRestrictions; + return this; + } + + public Builder setValidations(List validations) { + this.validations = validations; + return this; + } + + public Builder setValue(Object value) { + this.value = value; + return this; + } + + public ConnectorConfiguration build() { + return new ConnectorConfiguration( + category, + defaultValue, + dependsOn, + display, + label, + options, + order, + placeholder, + required, + sensitive, + tooltip, + type, + uiRestrictions, + validations, + value + ); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 744a4d2028990..624697edfcd85 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; @@ -41,6 +42,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -174,6 +176,43 @@ public void onFailure(Exception e) { } } + /** + * Updates the {@link ConnectorConfiguration} property of a {@link Connector}. + * + * @param request Request for updating connector configuration property. + * @param listener Listener to respond to a successful response or an error. 
+ */ + public void updateConnectorConfiguration(UpdateConnectorConfigurationAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source( + Map.of( + Connector.CONFIGURATION_FIELD.getPreferredName(), + request.getConfiguration(), + Connector.STATUS_FIELD.getPreferredName(), + ConnectorStatus.CONFIGURED.toString() + ) + ) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Updates the {@link ConnectorFiltering} property of a {@link Connector}. * diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java new file mode 100644 index 0000000000000..aa46353d47999 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorConfigurationAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_configuration_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_configuration")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorConfigurationAction.Request request = UpdateConnectorConfigurationAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorConfigurationAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorConfigurationAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java new file mode 100644 index 0000000000000..211c3b5a3a670 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorConfigurationAction extends HandledTransportAction< + UpdateConnectorConfigurationAction.Request, + UpdateConnectorConfigurationAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorConfigurationAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorConfigurationAction.NAME, + transportService, + actionFilters, + UpdateConnectorConfigurationAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorConfigurationAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorConfiguration( + request, + listener.map(r -> new UpdateConnectorConfigurationAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java new file mode 100644 index 0000000000000..6b5f52f3afda7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java @@ -0,0 +1,202 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static 
org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class UpdateConnectorConfigurationAction extends ActionType { + + public static final UpdateConnectorConfigurationAction INSTANCE = new UpdateConnectorConfigurationAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_configuration"; + + public UpdateConnectorConfigurationAction() { + super(NAME, UpdateConnectorConfigurationAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + private final Map configuration; + + public Request(String connectorId, Map configuration) { + this.connectorId = connectorId; + this.configuration = configuration; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.configuration = in.readMap(ConnectorConfiguration::new); + } + + public String getConnectorId() { + return connectorId; + } + + public Map getConfiguration() { + return configuration; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + if (Objects.isNull(configuration)) { + validationException = addValidationError("[configuration] cannot be null.", validationException); + } + + return validationException; + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "connector_update_configuration_request", + false, + ((args, connectorId) -> new UpdateConnectorConfigurationAction.Request( + connectorId, + (Map) args[0] + )) + ); + + static { + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> 
p.map(HashMap::new, ConnectorConfiguration::fromXContent), + Connector.CONFIGURATION_FIELD, + ObjectParser.ValueType.OBJECT + ); + } + + public static UpdateConnectorConfigurationAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorConfigurationAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse connector configuration.", e); + } + } + + public static UpdateConnectorConfigurationAction.Request fromXContent(XContentParser parser, String connectorId) + throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), configuration); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeMap(configuration, StreamOutput::writeWriteable); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(configuration, request.configuration); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, configuration); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + 
} + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java new file mode 100644 index 0000000000000..1efd3f47fdff0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents a dependency within a connector configuration, defining a specific field and its associated value. + * This class is used to encapsulate configuration dependencies in a structured format. + */ +public class ConfigurationDependency implements Writeable, ToXContentObject { + + private final String field; + private final Object value; + + /** + * Constructs a new instance of ConfigurationDependency. + * + * @param field The name of the field in the configuration dependency. + * @param value The value associated with the field. 
+ */ + public ConfigurationDependency(String field, Object value) { + this.field = field; + this.value = value; + } + + public ConfigurationDependency(StreamInput in) throws IOException { + this.field = in.readString(); + this.value = in.readGenericValue(); + } + + private static final ParseField FIELD_FIELD = new ParseField("field"); + private static final ParseField VALUE_FIELD = new ParseField("value"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_dependency", + true, + args -> new ConfigurationDependency.Builder().setField((String) args[0]).setValue(args[1]).build() + ); + + static { + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + return p.booleanValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, VALUE_FIELD, ObjectParser.ValueType.VALUE); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(FIELD_FIELD.getPreferredName(), field); + builder.field(VALUE_FIELD.getPreferredName(), value); + } + builder.endObject(); + return builder; + } + + public static ConfigurationDependency fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeGenericValue(value); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ConfigurationDependency that = (ConfigurationDependency) o; + return Objects.equals(field, that.field) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(field, value); + } + + public static class Builder { + + private String field; + private Object value; + + public Builder setField(String field) { + this.field = field; + return this; + } + + public Builder setValue(Object value) { + this.value = value; + return this; + } + + public ConfigurationDependency build() { + return new ConfigurationDependency(field, value); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java new file mode 100644 index 0000000000000..d6b3d83d705b9 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import java.util.Locale; + +public enum ConfigurationDisplayType { + TEXTBOX, + TEXTAREA, + NUMERIC, + TOGGLE, + DROPDOWN; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static ConfigurationDisplayType displayType(String type) { + for (ConfigurationDisplayType displayType : ConfigurationDisplayType.values()) { + if (displayType.name().equalsIgnoreCase(type)) { + return displayType; + } + } + throw new IllegalArgumentException("Unknown DisplayType: " + type); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java new file mode 100644 index 0000000000000..20162735985c6 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +public enum ConfigurationFieldType { + STRING("str"), + INTEGER("int"), + LIST("list"), + BOOLEAN("bool"); + + private final String value; + + ConfigurationFieldType(String value) { + this.value = value; + } + + @Override + public String toString() { + return this.value; + } + + public static ConfigurationFieldType fieldType(String type) { + for (ConfigurationFieldType fieldType : ConfigurationFieldType.values()) { + if (fieldType.value.equals(type)) { + return fieldType; + } + } + throw new IllegalArgumentException("Unknown FieldType: " + type); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java new file mode 100644 index 0000000000000..ba281c69702e0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class ConfigurationSelectOption implements Writeable, ToXContentObject { + private final String label; + private final String value; + + private ConfigurationSelectOption(String label, String value) { + this.label = label; + this.value = value; + } + + public ConfigurationSelectOption(StreamInput in) throws IOException { + this.label = in.readString(); + this.value = in.readString(); + } + + private static final ParseField LABEL_FIELD = new ParseField("label"); + private static final ParseField VALUE_FIELD = new ParseField("value"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_select_option", + true, + args -> new ConfigurationSelectOption.Builder().setLabel((String) args[0]).setValue((String) args[1]).build() + ); + + static { + PARSER.declareString(constructorArg(), LABEL_FIELD); + PARSER.declareString(constructorArg(), VALUE_FIELD); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(LABEL_FIELD.getPreferredName(), label); + builder.field(VALUE_FIELD.getPreferredName(), value); + } + builder.endObject(); + return builder; + } + + public static ConfigurationSelectOption fromXContent(XContentParser parser) throws IOException { + return 
PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(label); + out.writeString(value); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConfigurationSelectOption that = (ConfigurationSelectOption) o; + return Objects.equals(label, that.label) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(label, value); + } + + public static class Builder { + + private String label; + private String value; + + public Builder setLabel(String label) { + this.label = label; + return this; + } + + public Builder setValue(String value) { + this.value = value; + return this; + } + + public ConfigurationSelectOption build() { + return new ConfigurationSelectOption(label, value); + } + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java new file mode 100644 index 0000000000000..476ae113398dc --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents a configuration validation entity, encapsulating a validation constraint and its corresponding type. + * This class is used to define and handle specific validation rules or requirements within a configuration context. + */ +public class ConfigurationValidation implements Writeable, ToXContentObject { + + private final Object constraint; + private final ConfigurationValidationType type; + + /** + * Constructs a new ConfigurationValidation instance with specified constraint and type. + * This constructor initializes the object with a given validation constraint and its associated validation type. + * + * @param constraint The validation constraint, represented as an Object. + * @param type The type of configuration validation, specified as an instance of {@link ConfigurationValidationType}. 
+ */ + private ConfigurationValidation(Object constraint, ConfigurationValidationType type) { + this.constraint = constraint; + this.type = type; + } + + public ConfigurationValidation(StreamInput in) throws IOException { + this.constraint = in.readGenericValue(); + this.type = in.readEnum(ConfigurationValidationType.class); + } + + private static final ParseField CONSTRAINT_FIELD = new ParseField("constraint"); + private static final ParseField TYPE_FIELD = new ParseField("type"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_validation", + true, + args -> new ConfigurationValidation.Builder().setConstraint(args[0]).setType((ConfigurationValidationType) args[1]).build() + ); + + static { + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, CONSTRAINT_FIELD, ObjectParser.ValueType.VALUE); + PARSER.declareField( + constructorArg(), + (p, c) -> ConfigurationValidationType.validationType(p.text()), + TYPE_FIELD, + ObjectParser.ValueType.STRING + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(CONSTRAINT_FIELD.getPreferredName(), constraint); + builder.field(TYPE_FIELD.getPreferredName(), type.toString()); + } + builder.endObject(); + return builder; + } + + public static ConfigurationValidation fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeGenericValue(constraint); + out.writeEnum(type); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == 
null || getClass() != o.getClass()) return false; + ConfigurationValidation that = (ConfigurationValidation) o; + return Objects.equals(constraint, that.constraint) && type == that.type; + } + + @Override + public int hashCode() { + return Objects.hash(constraint, type); + } + + public static class Builder { + + private Object constraint; + private ConfigurationValidationType type; + + public Builder setConstraint(Object constraint) { + this.constraint = constraint; + return this; + } + + public Builder setType(ConfigurationValidationType type) { + this.type = type; + return this; + } + + public ConfigurationValidation build() { + return new ConfigurationValidation(constraint, type); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java new file mode 100644 index 0000000000000..2118014f4a286 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import java.util.Locale; + +public enum ConfigurationValidationType { + LESS_THAN, + GREATER_THAN, + LIST_TYPE, + INCLUDED_IN, + REGEX, + UNSET; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static ConfigurationValidationType validationType(String type) { + for (ConfigurationValidationType displayType : ConfigurationValidationType.values()) { + if (displayType.name().equalsIgnoreCase(type)) { + return displayType; + } + } + throw new IllegalArgumentException("Unknown ValidationType: " + type); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 2a302ddb68199..0c6caa3376c7b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -23,6 +23,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; @@ -317,7 +318,7 @@ public ConnectorSyncJob(StreamInput in) throws IOException { .setLanguage((String) args[i++]) .setPipeline((ConnectorIngestPipeline) args[i++]) .setServiceType((String) args[i++]) - .setConfiguration((Map) args[i++]) + .setConfiguration((Map) args[i++]) .build(); } ); diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 5e1686dde80f2..6a7aec2fc7430 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -29,6 +29,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; @@ -282,7 +283,9 @@ public void onResponse(GetResponse response) { .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) .setPipeline((ConnectorIngestPipeline) source.get(Connector.PIPELINE_FIELD.getPreferredName())) .setServiceType((String) source.get(Connector.SERVICE_TYPE_FIELD.getPreferredName())) - .setConfiguration((Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName())) + .setConfiguration( + (Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName()) + ) .build(); listener.onResponse(syncJobConnectorInfo); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java new file mode 100644 index 0000000000000..44d9c0fcf9e76 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.hamcrest.CoreMatchers.equalTo; + +public class ConnectorConfigurationTests extends ESTestCase { + + private NamedWriteableRegistry namedWriteableRegistry; + + @Before + public void registerNamedObjects() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList()); + + List namedWriteables = searchModule.getNamedWriteables(); + namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); + } + + public final void testRandomSerialization() throws IOException { + for (int runs = 0; runs < 10; runs++) { + ConnectorConfiguration testInstance = ConnectorTestUtils.getRandomConnectorConfigurationField(); + assertTransportSerialization(testInstance); + } + } + + public 
void testToXContent() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "default_value": null, + "depends_on": [ + { + "field": "some_field", + "value": true + } + ], + "display": "textbox", + "label": "Very important field", + "options": [], + "order": 4, + "required": true, + "sensitive": false, + "tooltip": "Wow, this tooltip is useful.", + "type": "str", + "ui_restrictions": [], + "validations": [ + { + "constraint": 0, + "type": "greater_than" + } + ], + "value": "" + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + + private void assertTransportSerialization(ConnectorConfiguration testInstance) throws IOException { + ConnectorConfiguration deserializedInstance = copyInstance(testInstance); + assertNotSame(testInstance, deserializedInstance); + assertThat(testInstance, equalTo(deserializedInstance)); + } + + private ConnectorConfiguration copyInstance(ConnectorConfiguration instance) throws IOException { + return copyWriteable(instance, namedWriteableRegistry, ConnectorConfiguration::new); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 0f2c6c3fa3e8e..ffa532012d982 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; @@ -24,6 +25,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -68,6 +70,29 @@ public void testDeleteConnector() throws Exception { expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete)); } + public void testUpdateConnectorConfiguration() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + Map connectorConfiguration = connector.getConfiguration() + .entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> ConnectorTestUtils.getRandomConnectorConfigurationField())); + + UpdateConnectorConfigurationAction.Request updateConfigurationRequest = new UpdateConnectorConfigurationAction.Request( + connector.getConnectorId(), + connectorConfiguration + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorConfiguration(updateConfigurationRequest); + assertThat(updateResponse.status(), 
equalTo(RestStatus.OK)); + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + assertThat(connectorConfiguration, equalTo(indexedConnector.getConfiguration())); + assertThat(indexedConnector.getStatus(), equalTo(ConnectorStatus.CONFIGURED)); + } + public void testUpdateConnectorPipeline() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); @@ -290,6 +315,32 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorConfiguration(UpdateConnectorConfigurationAction.Request updateConfiguration) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorConfiguration(updateConfiguration, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update configuration request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update configuration request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorFiltering(UpdateConnectorFilteringAction.Request updateFiltering) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 98d0112d8910f..3488c7d9c8ba7 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -8,6 +8,12 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDependency; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDisplayType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationFieldType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationSelectOption; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationValidation; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationValidationType; import org.elasticsearch.xpack.application.connector.filtering.FilteringAdvancedSnippet; import org.elasticsearch.xpack.application.connector.filtering.FilteringPolicy; import org.elasticsearch.xpack.application.connector.filtering.FilteringRule; @@ -19,6 +25,7 @@ import java.time.Instant; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -177,10 +184,51 @@ public static Connector getRandomSyncJobConnectorInfo() { .build(); } + private static ConfigurationDependency getRandomConfigurationDependency() { + return new ConfigurationDependency.Builder().setField(randomAlphaOfLength(10)).setValue(randomAlphaOfLength(10)).build(); + } + + private static ConfigurationSelectOption getRandomConfigurationSelectOption() { + return new ConfigurationSelectOption.Builder().setLabel(randomAlphaOfLength(10)).setValue(randomAlphaOfLength(10)).build(); + } + + private static ConfigurationValidation getRandomConfigurationValidation() { + return new 
ConfigurationValidation.Builder().setConstraint(randomAlphaOfLength(10)) + .setType(getRandomConfigurationValidationType()) + .build(); + } + + public static ConnectorConfiguration getRandomConnectorConfigurationField() { + return new ConnectorConfiguration.Builder().setCategory(randomAlphaOfLength(10)) + .setDefaultValue(randomAlphaOfLength(10)) + .setDependsOn(List.of(getRandomConfigurationDependency())) + .setDisplay(getRandomConfigurationDisplayType()) + .setLabel(randomAlphaOfLength(10)) + .setOptions(List.of(getRandomConfigurationSelectOption(), getRandomConfigurationSelectOption())) + .setOrder(randomInt()) + .setPlaceholder(randomAlphaOfLength(10)) + .setRequired(randomBoolean()) + .setSensitive(randomBoolean()) + .setTooltip(randomAlphaOfLength(10)) + .setType(getRandomConfigurationFieldType()) + .setUiRestrictions(List.of(randomAlphaOfLength(10), randomAlphaOfLength(10))) + .setValidations(List.of(getRandomConfigurationValidation())) + .setValue(randomAlphaOfLength(10)) + .build(); + } + + public static Map getRandomConnectorConfiguration() { + Map configMap = new HashMap<>(); + for (int i = 0; i < 3; i++) { + configMap.put(randomAlphaOfLength(10), getRandomConnectorConfigurationField()); + } + return configMap; + } + public static Connector getRandomConnector() { return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) .setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) - .setConfiguration(Collections.emptyMap()) + .setConfiguration(getRandomConnectorConfiguration()) .setCustomScheduling(Map.of(randomAlphaOfLengthBetween(5, 10), getRandomConnectorCustomSchedule())) .setDescription(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) @@ -251,4 +299,19 @@ private static FilteringValidationState getRandomFilteringValidationState() { FilteringValidationState[] values = FilteringValidationState.values(); return values[randomInt(values.length - 1)]; 
} + + private static ConfigurationDisplayType getRandomConfigurationDisplayType() { + ConfigurationDisplayType[] values = ConfigurationDisplayType.values(); + return values[randomInt(values.length - 1)]; + } + + private static ConfigurationFieldType getRandomConfigurationFieldType() { + ConfigurationFieldType[] values = ConfigurationFieldType.values(); + return values[randomInt(values.length - 1)]; + } + + private static ConfigurationValidationType getRandomConfigurationValidationType() { + ConfigurationValidationType[] values = ConfigurationValidationType.values(); + return values[randomInt(values.length - 1)]; + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index a83537f32f413..481f50bb41711 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -49,135 +49,161 @@ public final void testRandomSerialization() throws IOException { public void testToXContent() throws IOException { String content = XContentHelper.stripWhitespace(""" { - "api_key_id": "test", - "connector_id": "test-connector", - "custom_scheduling": { - "schedule-key": { - "configuration_overrides": { - "domain_allowlist": [ - "https://example.com" - ], - "max_crawl_depth": 1, - "seed_urls": [ - "https://example.com/blog", - "https://example.com/info" - ], - "sitemap_discovery_disabled": true, - "sitemap_urls": [ - "https://example.com/sitemap.xml" - ] - }, - "enabled": true, - "interval": "0 0 12 * * ?", - "last_synced": null, - "name": "My Schedule" - } - }, - "configuration": {}, - "description": "test-connector", - "features": { - "document_level_security": { - "enabled": true - }, - "filtering_advanced_config": true, - "sync_rules": { - 
"advanced": { - "enabled": false + "api_key_id":"test", + "connector_id":"test-connector", + "custom_scheduling":{ + "schedule-key":{ + "configuration_overrides":{ + "domain_allowlist":[ + "https://example.com" + ], + "max_crawl_depth":1, + "seed_urls":[ + "https://example.com/blog", + "https://example.com/info" + ], + "sitemap_discovery_disabled":true, + "sitemap_urls":[ + "https://example.com/sitemap.xml" + ] + }, + "enabled":true, + "interval":"0 0 12 * * ?", + "last_synced":null, + "name":"My Schedule" + } + }, + "configuration":{ + "some_field":{ + "default_value":null, + "depends_on":[ + { + "field":"some_field", + "value":true + } + ], + "display":"textbox", + "label":"Very important field", + "options":[], + "order":4, + "required":true, + "sensitive":false, + "tooltip":"Wow, this tooltip is useful.", + "type":"str", + "ui_restrictions":[], + "validations":[ + { + "constraint":0, + "type":"greater_than" + } + ], + "value":"" + } + }, + "description":"test-connector", + "features":{ + "document_level_security":{ + "enabled":true + }, + "filtering_advanced_config":true, + "sync_rules":{ + "advanced":{ + "enabled":false + }, + "basic":{ + "enabled":true + } + } + }, + "filtering":[ + { + "active":{ + "advanced_snippet":{ + "created_at":"2023-11-09T15:13:08.231Z", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":{} }, - "basic": { - "enabled": true + "rules":[ + { + "created_at":"2023-11-09T15:13:08.231Z", + "field":"_", + "id":"DEFAULT", + "order":0, + "policy":"include", + "rule":"regex", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":".*" + } + ], + "validation":{ + "errors":[], + "state":"valid" } - } - }, - "filtering": [ - { - "active": { - "advanced_snippet": { - "created_at": "2023-11-09T15:13:08.231Z", - "updated_at": "2023-11-09T15:13:08.231Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-11-09T15:13:08.231Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": 
"2023-11-09T15:13:08.231Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } + }, + "domain":"DEFAULT", + "draft":{ + "advanced_snippet":{ + "created_at":"2023-11-09T15:13:08.231Z", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":{} }, - "domain": "DEFAULT", - "draft": { - "advanced_snippet": { - "created_at": "2023-11-09T15:13:08.231Z", - "updated_at": "2023-11-09T15:13:08.231Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-11-09T15:13:08.231Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-11-09T15:13:08.231Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } + "rules":[ + { + "created_at":"2023-11-09T15:13:08.231Z", + "field":"_", + "id":"DEFAULT", + "order":0, + "policy":"include", + "rule":"regex", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":".*" + } + ], + "validation":{ + "errors":[], + "state":"valid" } - } - ], - "index_name": "search-test", - "is_native": true, - "language": "polish", - "last_access_control_sync_error": "some error", - "last_access_control_sync_scheduled_at": "2023-11-09T15:13:08.231Z", - "last_access_control_sync_status": "pending", - "last_deleted_document_count": 42, - "last_incremental_sync_scheduled_at": "2023-11-09T15:13:08.231Z", - "last_indexed_document_count": 42, - "last_seen": "2023-11-09T15:13:08.231Z", - "last_sync_error": "some error", - "last_sync_scheduled_at": "2024-11-09T15:13:08.231Z", - "last_sync_status": "completed", - "last_synced": "2024-11-09T15:13:08.231Z", - "name": "test-name", - "pipeline": { - "extract_binary_content": true, - "name": "ent-search-generic-ingestion", - "reduce_whitespace": true, - "run_ml_inference": false - }, - "scheduling": { - "access_control": { - "enabled": false, - "interval": "0 0 0 * * ?" - }, - "full": { - "enabled": false, - "interval": "0 0 0 * * ?" - }, - "incremental": { - "enabled": false, - "interval": "0 0 0 * * ?" 
- } - }, - "service_type": "google_drive", - "status": "needs_configuration", - "sync_now": false + } + } + ], + "index_name":"search-test", + "is_native":true, + "language":"polish", + "last_access_control_sync_error":"some error", + "last_access_control_sync_scheduled_at":"2023-11-09T15:13:08.231Z", + "last_access_control_sync_status":"pending", + "last_deleted_document_count":42, + "last_incremental_sync_scheduled_at":"2023-11-09T15:13:08.231Z", + "last_indexed_document_count":42, + "last_seen":"2023-11-09T15:13:08.231Z", + "last_sync_error":"some error", + "last_sync_scheduled_at":"2024-11-09T15:13:08.231Z", + "last_sync_status":"completed", + "last_synced":"2024-11-09T15:13:08.231Z", + "name":"test-name", + "pipeline":{ + "extract_binary_content":true, + "name":"ent-search-generic-ingestion", + "reduce_whitespace":true, + "run_ml_inference":false + }, + "scheduling":{ + "access_control":{ + "enabled":false, + "interval":"0 0 0 * * ?" + }, + "full":{ + "enabled":false, + "interval":"0 0 0 * * ?" + }, + "incremental":{ + "enabled":false, + "interval":"0 0 0 * * ?" + } + }, + "service_type":"google_drive", + "status":"needs_configuration", + "sync_now":false }"""); Connector connector = Connector.fromXContentBytes(new BytesArray(content), XContentType.JSON); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..7e8b026a23105 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorConfigurationActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorConfigurationAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorConfigurationAction.Request::new; + } + + @Override + protected UpdateConnectorConfigurationAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorConfigurationAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorConfiguration()); + } + + @Override + protected UpdateConnectorConfigurationAction.Request mutateInstance(UpdateConnectorConfigurationAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorConfigurationAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorConfigurationAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorConfigurationAction.Request mutateInstanceForVersion( + UpdateConnectorConfigurationAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..d4aa4f12b36d3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorConfigurationActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorConfigurationAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorConfigurationAction.Response::new; + } + + @Override + protected UpdateConnectorConfigurationAction.Response createTestInstance() { + return new UpdateConnectorConfigurationAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorConfigurationAction.Response mutateInstance(UpdateConnectorConfigurationAction.Response instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorConfigurationAction.Response mutateInstanceForVersion( + UpdateConnectorConfigurationAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 3409f549cb579..44399e84ee6e0 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,6 +127,7 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", + "cluster:admin/xpack/connector/update_configuration", "cluster:admin/xpack/connector/update_error", "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", From f6fcfe51d0fb6fe801f29a3025dacb9752c97f3f Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Mon, 4 Dec 2023 17:32:08 -0500 Subject: [PATCH 134/181] [ML] If trained model download task is in progress, wait for it to finish before executing start trained model deployment (#102944) --- docs/changelog/102944.yaml | 6 + .../xpack/core/ml/job/messages/Messages.java | 2 + .../TransportGetTrainedModelsAction.java | 1 + ...portStartTrainedModelDeploymentAction.java | 289 +++++++++++------- .../test/ml/3rd_party_deployment.yml | 70 +++-- 5 files changed, 243 insertions(+), 125 deletions(-) create mode 100644 docs/changelog/102944.yaml diff --git a/docs/changelog/102944.yaml b/docs/changelog/102944.yaml new file mode 100644 index 0000000000000..58a1bb8f6bbaa --- /dev/null +++ b/docs/changelog/102944.yaml @@ -0,0 +1,6 @@ +pr: 102944 +summary: "If trained model download task is in progress, wait for it to finish before\ + \ executing start trained model deployment" +area: Machine Learning +type: 
bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index ce26bb53dc223..36b4c0f1815ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -123,6 +123,8 @@ public final class Messages { "Unable to delete model [{0}] as it is required by machine learning"; public static final String MODEL_DEFINITION_TRUNCATED = "Model definition truncated. Unable to deserialize trained model definition [{0}]"; + public static final String MODEL_DOWNLOAD_IN_PROGRESS = + "Model download task is currently running. Wait for trained model [{0}] download task to complete then try again"; public static final String UNABLE_TO_DEPLOY_MODEL_BAD_PARTS = "Unable to deploy model, please delete and recreate the model definition"; public static final String INFERENCE_FAILED_TO_DESERIALIZE = "Could not deserialize trained model [{0}]"; public static final String INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED = diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java index cda76d3c3ee1d..e6d1fe30d7646 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java @@ -89,6 +89,7 @@ protected void doExecute(Task task, Request request, ActionListener li new OriginSettingClient(client, ML_ORIGIN), configs.get(0), false, // missing docs are not an error + null, // if download is in progress, don't wait for it to complete ActionListener.wrap(modelIdAndLength -> { 
configs.get(0).setFullDefinition(modelIdAndLength.v2() > 0); listener.onResponse(responseBuilder.setModels(configs).build()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 113a093b3ae65..4a569b374582a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file was contributed to by a generative AI */ package org.elasticsearch.xpack.ml.action; @@ -13,6 +15,7 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.Client; @@ -64,6 +67,7 @@ import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; +import org.elasticsearch.xpack.ml.utils.TaskRetriever; import java.util.Collection; import java.util.HashMap; @@ -269,7 +273,13 @@ protected void masterOperation( error -> { if (ExceptionsHelper.unwrapCause(error) instanceof ResourceNotFoundException) { // no name clash, continue with the deployment - checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, modelSizeListener); + checkFullModelDefinitionIsPresent( + 
client, + trainedModelConfig, + true, + request.getTimeout(), + modelSizeListener + ); } else { listener.onFailure(error); } @@ -277,7 +287,7 @@ protected void masterOperation( ) ); } else { - checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, modelSizeListener); + checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, request.getTimeout(), modelSizeListener); } }, listener::onFailure); @@ -342,27 +352,30 @@ private void deleteFailedDeployment( * individual per-document definition lengths and checking * the total is equal to the total definition length as * stored in the docs. - * + *

    * On success the response is a tuple * {@code (model id, total definition length)} - * + *

    * If {@code errorIfDefinitionIsMissing == false} and some * definition docs are missing then {@code listener::onResponse} * is called with the total definition length == 0. * This usage is to answer yes/no questions if the full model * definition is present. * - * @param mlOriginClient A client using ML_ORIGIN - * @param config trained model config + * @param mlOriginClient A client using ML_ORIGIN + * @param config trained model config * @param errorIfDefinitionIsMissing If true missing definition parts cause errors. * If false and some parts are missing the total * definition length in the response is set to 0. - * @param listener response listener + * @param timeout The timeout value in seconds that the request should fail if it does not complete. + * If null, then this will not wait for the download to complete before returning. + * @param listener response listener */ static void checkFullModelDefinitionIsPresent( OriginSettingClient mlOriginClient, TrainedModelConfig config, boolean errorIfDefinitionIsMissing, + TimeValue timeout, ActionListener> listener ) { if (config.getLocation() instanceof IndexLocation == false) { @@ -373,27 +386,86 @@ static void checkFullModelDefinitionIsPresent( final String modelId = config.getModelId(); String index = ((IndexLocation) config.getLocation()).getIndexName(); - mlOriginClient.prepareSearch(index) - .setQuery( - QueryBuilders.constantScoreQuery( - QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelId)) - .filter( - QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelDefinitionDoc.NAME) - ) - ) - ) - .setFetchSource(false) - .addDocValueField(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()) - .addDocValueField(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) - .setSize(MAX_NUM_NATIVE_DEFINITION_PARTS) - .setTrackTotalHits(true) - 
.addSort(SortBuilders.fieldSort(TrainedModelDefinitionDoc.DOC_NUM.getPreferredName()).order(SortOrder.ASC).unmappedType("long")) - .execute(ActionListener.wrap(response -> { - SearchHit[] hits = response.getHits().getHits(); - if (hits.length == 0) { + + // Step 3 + // Verify the model definition parts are all present + ActionListener step3SearchResultsVerificationListener = step3VerifyModelPartsArePresent( + errorIfDefinitionIsMissing, + listener, + modelId + ); + + // Step 2 + // Search for the model definition parts to ensure they are all present + ActionListener step2DocsSearchListener = step2SearchForModelParts( + mlOriginClient, + listener, + index, + modelId, + step3SearchResultsVerificationListener + ); + + // Step 1 (there is no step zero) + // Check if there is a download task for this model, and wait for it to complete or timeout + step1CheckForDownloadTask(mlOriginClient, errorIfDefinitionIsMissing, timeout, listener, modelId, step2DocsSearchListener); + } + + private static ActionListener step3VerifyModelPartsArePresent( + boolean errorIfDefinitionIsMissing, + ActionListener> listener, + String modelId + ) { + return ActionListener.wrap(response -> { + SearchHit[] hits = response.getHits().getHits(); + if (hits.length == 0) { + failOrRespondWith0( + () -> new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)), + errorIfDefinitionIsMissing, + modelId, + listener + ); + return; + } + + long firstTotalLength; + DocumentField firstTotalLengthField = hits[0].field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); + if (firstTotalLengthField != null && firstTotalLengthField.getValue() instanceof Long firstTotalDefinitionLength) { + firstTotalLength = firstTotalDefinitionLength; + } else { + failOrRespondWith0( + () -> missingFieldsError( + modelId, + hits[0].getId(), + List.of(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) + ), + errorIfDefinitionIsMissing, + modelId, + 
listener + ); + return; + } + + Set missingFields = new HashSet<>(); + long summedLengths = 0; + for (SearchHit hit : hits) { + long totalLength = -1; + DocumentField totalLengthField = hit.field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); + if (totalLengthField != null && totalLengthField.getValue() instanceof Long totalDefinitionLength) { + totalLength = totalDefinitionLength; + } else { + missingFields.add(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); + } + + DocumentField definitionLengthField = hit.field(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); + if (definitionLengthField != null && definitionLengthField.getValue() instanceof Long definitionLength) { + summedLengths += definitionLength; + } else { + missingFields.add(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); + } + + if (missingFields.isEmpty() == false) { failOrRespondWith0( - () -> new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)), + () -> missingFieldsError(modelId, hit.getId(), missingFields), errorIfDefinitionIsMissing, modelId, listener @@ -401,16 +473,18 @@ static void checkFullModelDefinitionIsPresent( return; } - long firstTotalLength; - DocumentField firstTotalLengthField = hits[0].field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); - if (firstTotalLengthField != null && firstTotalLengthField.getValue() instanceof Long firstTotalDefinitionLength) { - firstTotalLength = firstTotalDefinitionLength; - } else { + if (totalLength != firstTotalLength) { + final long finalTotalLength = totalLength; failOrRespondWith0( - () -> missingFieldsError( + () -> ExceptionsHelper.badRequestException( + "[{}] [total_definition_length] must be the same in all model definition parts. " + + "The value [{}] in model definition part [{}] does not match the value [{}] in part [{}]. 
" + + Messages.UNABLE_TO_DEPLOY_MODEL_BAD_PARTS, modelId, - hits[0].getId(), - List.of(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) + finalTotalLength, + TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hit.getId())), + firstTotalLength, + TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hits[0].getId())) ), errorIfDefinitionIsMissing, modelId, @@ -419,76 +493,87 @@ static void checkFullModelDefinitionIsPresent( return; } - Set missingFields = new HashSet<>(); - long summedLengths = 0; - for (SearchHit hit : hits) { - long totalLength = -1; - DocumentField totalLengthField = hit.field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); - if (totalLengthField != null && totalLengthField.getValue() instanceof Long totalDefinitionLength) { - totalLength = totalDefinitionLength; - } else { - missingFields.add(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); - } - - DocumentField definitionLengthField = hit.field(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); - if (definitionLengthField != null && definitionLengthField.getValue() instanceof Long definitionLength) { - summedLengths += definitionLength; - } else { - missingFields.add(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); - } + } + if (summedLengths != firstTotalLength) { + failOrRespondWith0( + () -> ExceptionsHelper.badRequestException(Messages.getMessage(Messages.MODEL_DEFINITION_TRUNCATED, modelId)), + errorIfDefinitionIsMissing, + modelId, + listener + ); + return; + } + listener.onResponse(new Tuple<>(modelId, summedLengths)); + }, e -> { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + failOrRespondWith0(() -> { + Exception ex = new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)); + ex.addSuppressed(e); + return ex; + }, errorIfDefinitionIsMissing, modelId, listener); + } else { + 
listener.onFailure(e); + } + }); + } - if (missingFields.isEmpty() == false) { - failOrRespondWith0( - () -> missingFieldsError(modelId, hit.getId(), missingFields), - errorIfDefinitionIsMissing, - modelId, - listener - ); - return; - } + private static ActionListener step2SearchForModelParts( + OriginSettingClient mlOriginClient, + ActionListener> listener, + String index, + String modelId, + ActionListener nextStepListener + ) { + return ActionListener.wrap(r -> { + mlOriginClient.prepareSearch(index) + .setQuery( + QueryBuilders.constantScoreQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelId)) + .filter( + QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelDefinitionDoc.NAME) + ) + ) + ) + .setFetchSource(false) + .addDocValueField(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()) + .addDocValueField(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) + .setSize(MAX_NUM_NATIVE_DEFINITION_PARTS) + .setTrackTotalHits(true) + .addSort( + SortBuilders.fieldSort(TrainedModelDefinitionDoc.DOC_NUM.getPreferredName()).order(SortOrder.ASC).unmappedType("long") + ) + .execute(nextStepListener); - if (totalLength != firstTotalLength) { - final long finalTotalLength = totalLength; - failOrRespondWith0( - () -> ExceptionsHelper.badRequestException( - "[{}] [total_definition_length] must be the same in all model definition parts. " - + "The value [{}] in model definition part [{}] does not match the value [{}] in part [{}]. 
" - + Messages.UNABLE_TO_DEPLOY_MODEL_BAD_PARTS, - modelId, - finalTotalLength, - TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hit.getId())), - firstTotalLength, - TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hits[0].getId())) - ), - errorIfDefinitionIsMissing, - modelId, - listener - ); - return; - } + }, listener::onFailure); + } - } - if (summedLengths != firstTotalLength) { - failOrRespondWith0( - () -> ExceptionsHelper.badRequestException(Messages.getMessage(Messages.MODEL_DEFINITION_TRUNCATED, modelId)), - errorIfDefinitionIsMissing, - modelId, - listener - ); - return; - } - listener.onResponse(new Tuple<>(modelId, summedLengths)); - }, e -> { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - failOrRespondWith0(() -> { - Exception ex = new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)); - ex.addSuppressed(e); - return ex; - }, errorIfDefinitionIsMissing, modelId, listener); - } else { - listener.onFailure(e); - } - })); + /* + @param timeout null value indicates that the request should not wait for the download to complete before returning + */ + private static void step1CheckForDownloadTask( + OriginSettingClient mlOriginClient, + boolean errorIfDefinitionIsMissing, + TimeValue timeout, + ActionListener> failureListener, + String modelId, + ActionListener nextStepListener + ) { + TaskRetriever.getDownloadTaskInfo(mlOriginClient, modelId, timeout != null, ActionListener.wrap(taskInfo -> { + if (taskInfo == null) { + nextStepListener.onResponse(null); + } else { + failOrRespondWith0( + () -> new ElasticsearchStatusException( + Messages.getMessage(Messages.MODEL_DOWNLOAD_IN_PROGRESS, modelId), + RestStatus.REQUEST_TIMEOUT + ), + errorIfDefinitionIsMissing, + modelId, + failureListener + ); + } + }, failureListener::onFailure), timeout); } private static void failOrRespondWith0( diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml index 1fa675ff4284f..2b9bc06bdd1d0 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml @@ -153,11 +153,11 @@ setup: - do: ml.get_trained_models: model_id: "bert_fill_mask_model" - - match: {trained_model_configs.0.inference_config.fill_mask.mask_token: "[MASK]"} + - match: { trained_model_configs.0.inference_config.fill_mask.mask_token: "[MASK]" } - do: ml.get_trained_models: model_id: "roberta_fill_mask_model" - - match: {trained_model_configs.0.inference_config.fill_mask.mask_token: ""} + - match: { trained_model_configs.0.inference_config.fill_mask.mask_token: "" } - do: catch: /IllegalArgumentException. Mask token requested was \[\] but must be \[\[MASK\]\] for this model/ ml.put_trained_model: @@ -215,6 +215,30 @@ setup: catch: /Could not find trained model definition \[distilbert-finetuned-sst\]/ ml.start_trained_model_deployment: model_id: distilbert-finetuned-sst + +--- +"Test start deployment fails while model download in progress": + + - do: + ml.put_trained_model: + model_id: .elser_model_2 + body: > + { + "input": { + "field_names": ["text_field"] + } + } + - do: + catch: /Model download task is currently running\. 
Wait for trained model \[.elser_model_2\] download task to complete then try again/ + ml.start_trained_model_deployment: + model_id: .elser_model_2 + - do: + ml.delete_trained_model: + model_id: .elser_model_2 + - do: + catch: /No known trained model with model_id \[.elser_model_2\]/ + ml.start_trained_model_deployment: + model_id: .elser_model_2 --- "Test start and stop deployment with no cache": - do: @@ -222,9 +246,9 @@ setup: model_id: test_model cache_size: 0 wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.cache_size: "0"} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.cache_size: "0" } - do: ml.stop_trained_model_deployment: @@ -240,9 +264,9 @@ setup: model_id: test_model cache_size: 10kb wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.cache_size: 10kb} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.cache_size: 10kb } - do: allowed_warnings: @@ -380,9 +404,9 @@ setup: deployment_id: test_model_deployment_cache_test cache_size: 10kb wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.cache_size: 10kb} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.cache_size: 10kb } - do: allowed_warnings: @@ -535,7 +559,7 @@ setup: model_id: test_model deployment_id: test_model_for_search wait_for: started - - match: {assignment.assignment_state: started} + - match: { assignment.assignment_state: started } - 
do: ml.stop_trained_model_deployment: @@ -565,9 +589,9 @@ setup: deployment_id: test_model_for_search priority: low wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.deployment_id: test_model_for_search} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.deployment_id: test_model_for_search } - do: allowed_warnings: - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' @@ -586,9 +610,9 @@ setup: deployment_id: test_model_for_ingest priority: low wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.deployment_id: test_model_for_ingest} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.deployment_id: test_model_for_ingest } - do: allowed_warnings: - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' 
@@ -625,7 +649,7 @@ setup: model_id: test_model deployment_id: test_model_deployment wait_for: started - - match: {assignment.assignment_state: started} + - match: { assignment.assignment_state: started } - do: catch: /Could not start model deployment because an existing deployment with the same id \[test_model_deployment\] exist/ @@ -648,9 +672,9 @@ setup: ml.start_trained_model_deployment: model_id: test_model wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.deployment_id: test_model} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.deployment_id: test_model } --- "Test cannot create model with a deployment Id": @@ -659,7 +683,7 @@ setup: model_id: test_model wait_for: started deployment_id: test_model_deployment - - match: {assignment.assignment_state: started} + - match: { assignment.assignment_state: started } - do: catch: /Cannot create model \[test_model_deployment\] the id is the same as an current model deployment/ From b69f78da5fd5b468be1a4b8c892558231db8bee5 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 4 Dec 2023 17:40:23 -0500 Subject: [PATCH 135/181] Optimize _count type API requests (#102888) When calling `_count` or simply `_search?size=0&track_total_hits=true` with no aggregations, we end up doing too much work on the coordinator, allocating large arrays and keeping unnecessary references to various objects. This commit optimizes this scenario by only collecting the count and allowing result references to be collected. 
--- docs/changelog/102888.yaml | 5 + .../rest-api-spec/test/count/30_min_score.yml | 42 +++++++ .../search/simple/SimpleSearchIT.java | 32 +++++ .../CountOnlyQueryPhaseResultConsumer.java | 118 ++++++++++++++++++ .../action/search/CountedCollector.java | 4 +- .../action/search/DfsQueryPhase.java | 8 +- .../SearchDfsQueryThenFetchAsyncAction.java | 5 +- .../action/search/SearchPhaseController.java | 15 ++- .../SearchQueryThenFetchAsyncAction.java | 2 +- .../action/search/TransportSearchAction.java | 3 +- .../org/elasticsearch/common/util/Maps.java | 13 ++ .../action/search/DfsQueryPhaseTests.java | 12 +- .../action/search/FetchSearchPhaseTests.java | 13 +- .../search/SearchPhaseControllerTests.java | 22 ++-- 14 files changed, 260 insertions(+), 34 deletions(-) create mode 100644 docs/changelog/102888.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml create mode 100644 server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java diff --git a/docs/changelog/102888.yaml b/docs/changelog/102888.yaml new file mode 100644 index 0000000000000..79ea9cbe712de --- /dev/null +++ b/docs/changelog/102888.yaml @@ -0,0 +1,5 @@ +pr: 102888 +summary: "Optimize `_count` type API requests" +area: Search +type: enhancement +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml new file mode 100644 index 0000000000000..278a7095add5e --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml @@ -0,0 +1,42 @@ +--- +"count with min_score": + - do: + indices.create: + index: test_count_min_score + + - do: + index: + index: test_count_min_score + id: "1" + body: { field: foo bar } + + - do: + index: + index: test_count_min_score + id: "2" + body: { field: foo bar bar bar bar } + + - do: + indices.refresh: + index: 
[test_count_min_score] + + - do: + count: + index: test_count_min_score + q: field:foo + min_score: 0.2 + - match: {count : 1} + + - do: + count: + index: test_count_min_score + q: field:foo + min_score: 0.1 + - match: { count: 2 } + + - do: + count: + index: test_count_min_score + q: field:foo + min_score: 0.5 + - match: { count: 0 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index f47303b83b6e3..cb13fca85541f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -493,6 +494,37 @@ public void testTooLongRegexInRegexpQuery() throws Exception { ); } + public void testStrictlyCountRequest() throws Exception { + createIndex("test_count_1"); + indexRandom( + true, + prepareIndex("test_count_1").setId("1").setSource("field", "value"), + prepareIndex("test_count_1").setId("2").setSource("field", "value"), + prepareIndex("test_count_1").setId("3").setSource("field", "value"), + prepareIndex("test_count_1").setId("4").setSource("field", "value"), + prepareIndex("test_count_1").setId("5").setSource("field", "value"), + prepareIndex("test_count_1").setId("6").setSource("field", "value") + ); + + createIndex("test_count_2"); + indexRandom( + true, + prepareIndex("test_count_2").setId("1").setSource("field", "value_2"), + 
prepareIndex("test_count_2").setId("2").setSource("field", "value_2"), + prepareIndex("test_count_2").setId("3").setSource("field", "value_2"), + prepareIndex("test_count_2").setId("4").setSource("field", "value_2"), + prepareIndex("test_count_2").setId("6").setSource("field", "value_2") + ); + assertNoFailuresAndResponse( + prepareSearch("test_count_1", "test_count_2").setTrackTotalHits(true).setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(11L)); + assertThat(response.getHits().getHits().length, equalTo(0)); + } + ); + + } + private void assertWindowFails(SearchRequestBuilder search) { SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> search.get()); assertThat( diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java new file mode 100644 index 0000000000000..1e67522f6a671 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.search; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.SearchPhaseResult; + +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.Stream; + +/** + * Optimized phase result consumer that only counts the number of hits and does not + * store any other information. + */ +class CountOnlyQueryPhaseResultConsumer extends SearchPhaseResults { + AtomicReference relationAtomicReference = new AtomicReference<>(TotalHits.Relation.EQUAL_TO); + LongAdder totalHits = new LongAdder(); + + private final AtomicBoolean terminatedEarly = new AtomicBoolean(false); + private final AtomicBoolean timedOut = new AtomicBoolean(false); + private final Set results; + private final SearchProgressListener progressListener; + + CountOnlyQueryPhaseResultConsumer(SearchProgressListener progressListener, int numShards) { + super(numShards); + this.progressListener = progressListener; + this.results = Collections.newSetFromMap(Maps.newConcurrentHashMapWithExpectedSize(numShards)); + } + + @Override + Stream getSuccessfulResults() { + return Stream.empty(); + } + + @Override + public void consumeResult(SearchPhaseResult result, Runnable next) { + assert results.contains(result.getShardIndex()) == false : "shardIndex: " + result.getShardIndex() + " is already set"; + results.add(result.getShardIndex()); + // set the relation to the first non-equal relation + relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, result.queryResult().getTotalHits().relation); + totalHits.add(result.queryResult().getTotalHits().value); + terminatedEarly.compareAndSet(false, (result.queryResult().terminatedEarly() != null && 
result.queryResult().terminatedEarly())); + timedOut.compareAndSet(false, result.queryResult().searchTimedOut()); + progressListener.notifyQueryResult(result.getShardIndex(), result.queryResult()); + next.run(); + } + + @Override + boolean hasResult(int shardIndex) { + return results.contains(shardIndex); + } + + @Override + public SearchPhaseController.ReducedQueryPhase reduce() throws Exception { + SearchPhaseController.ReducedQueryPhase reducePhase = new SearchPhaseController.ReducedQueryPhase( + new TotalHits(totalHits.sum(), relationAtomicReference.get()), + 0, + Float.NaN, + timedOut.get(), + terminatedEarly.get(), + null, + null, + null, + SearchPhaseController.SortedTopDocs.EMPTY, + null, + null, + 1, + 0, + 0, + false + ); + if (progressListener != SearchProgressListener.NOOP) { + progressListener.notifyFinalReduce( + List.of(), + reducePhase.totalHits(), + reducePhase.aggregations(), + reducePhase.numReducePhases() + ); + } + return reducePhase; + } + + @Override + AtomicArray getAtomicArray() { + return new AtomicArray<>(0); + } + + @Override + public void incRef() {} + + @Override + public boolean tryIncRef() { + return true; + } + + @Override + public boolean decRef() { + return true; + } + + @Override + public boolean hasReferences() { + return false; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java index d5605b280f385..3a12b72570caf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java @@ -18,12 +18,12 @@ * where the given index is used to set the result on the array. 
*/ final class CountedCollector { - private final ArraySearchPhaseResults resultConsumer; + private final SearchPhaseResults resultConsumer; private final CountDown counter; private final Runnable onFinish; private final SearchPhaseContext context; - CountedCollector(ArraySearchPhaseResults resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { + CountedCollector(SearchPhaseResults resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { this.resultConsumer = resultConsumer; resultConsumer.incRef(); this.counter = new CountDown(expectedOps); diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index ce2c86be4b4e6..54408cd560314 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -37,11 +37,11 @@ * @see CountedCollector#onFailure(int, SearchShardTarget, Exception) */ final class DfsQueryPhase extends SearchPhase { - private final QueryPhaseResultConsumer queryResult; + private final SearchPhaseResults queryResult; private final List searchResults; private final AggregatedDfs dfs; private final List knnResults; - private final Function, SearchPhase> nextPhaseFactory; + private final Function, SearchPhase> nextPhaseFactory; private final SearchPhaseContext context; private final SearchTransportService searchTransportService; private final SearchProgressListener progressListener; @@ -50,8 +50,8 @@ final class DfsQueryPhase extends SearchPhase { List searchResults, AggregatedDfs dfs, List knnResults, - QueryPhaseResultConsumer queryResult, - Function, SearchPhase> nextPhaseFactory, + SearchPhaseResults queryResult, + Function, SearchPhase> nextPhaseFactory, SearchPhaseContext context ) { super("dfs_query"); diff --git 
a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 2fcb792f821c9..68d1bec590318 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsKnnResults; @@ -26,7 +27,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction { - private final QueryPhaseResultConsumer queryPhaseResultConsumer; + private final SearchPhaseResults queryPhaseResultConsumer; private final SearchProgressListener progressListener; SearchDfsQueryThenFetchAsyncAction( @@ -36,7 +37,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction final Map aliasFilter, final Map concreteIndexBoosts, final Executor executor, - final QueryPhaseResultConsumer queryPhaseResultConsumer, + final SearchPhaseResults queryPhaseResultConsumer, final SearchRequest request, final ActionListener listener, final GroupShardsIterator shardsIts, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index e262003935969..d4808def29d1f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -783,7 +783,7 @@ AggregationReduceContext.Builder getReduceContext(Supplier isCanceled, 
/** * Returns a new {@link QueryPhaseResultConsumer} instance that reduces search responses incrementally. */ - QueryPhaseResultConsumer newSearchPhaseResults( + SearchPhaseResults newSearchPhaseResults( Executor executor, CircuitBreaker circuitBreaker, Supplier isCanceled, @@ -792,6 +792,19 @@ QueryPhaseResultConsumer newSearchPhaseResults( int numShards, Consumer onPartialMergeFailure ) { + final int size = request.source() == null || request.source().size() == -1 ? SearchService.DEFAULT_SIZE : request.source().size(); + // Use CountOnlyQueryPhaseResultConsumer for requests without aggs, suggest, etc. things only wanting a total count and + // returning no hits + if (size == 0 + && (request.source() == null + || (request.source().aggregations() == null + && request.source().suggest() == null + && request.source().rankBuilder() == null + && request.source().knnSearch().isEmpty() + && request.source().profile() == false)) + && request.resolveTrackTotalHitsUpTo() == SearchContext.TRACK_TOTAL_HITS_ACCURATE) { + return new CountOnlyQueryPhaseResultConsumer(listener, numShards); + } return new QueryPhaseResultConsumer( request, executor, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index 8cf4ee9b75f76..51d330f55aee1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -43,7 +43,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction aliasFilter, final Map concreteIndexBoosts, final Executor executor, - final QueryPhaseResultConsumer resultConsumer, + final SearchPhaseResults resultConsumer, final SearchRequest request, final ActionListener listener, final GroupShardsIterator shardsIts, diff --git 
a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 9010fa1ea0e75..1b3b321a530e6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -60,6 +60,7 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -1286,7 +1287,7 @@ public SearchPhase newSearchPhase( && task.getProgressListener() == SearchProgressListener.NOOP) { task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); } - final QueryPhaseResultConsumer queryResultConsumer = searchPhaseController.newSearchPhaseResults( + final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( executor, circuitBreaker, task::isCancelled, diff --git a/server/src/main/java/org/elasticsearch/common/util/Maps.java b/server/src/main/java/org/elasticsearch/common/util/Maps.java index da5089983ceb5..1b46e71dadd12 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Maps.java +++ b/server/src/main/java/org/elasticsearch/common/util/Maps.java @@ -18,6 +18,7 @@ import java.util.Objects; import java.util.Set; import java.util.TreeMap; +import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collector; @@ -281,6 +282,18 @@ public static Map newHashMapWithExpectedSize(int expectedSize) { return new HashMap<>(capacity(expectedSize)); } + /** + * Returns a concurrent hash map with a capacity 
sufficient to keep expectedSize elements without being resized. + * + * @param expectedSize the expected amount of elements in the map + * @param the key type + * @param the value type + * @return a new pre-sized {@link HashMap} + */ + public static Map newConcurrentHashMapWithExpectedSize(int expectedSize) { + return new ConcurrentHashMap<>(capacity(expectedSize)); + } + /** * Returns a linked hash map with a capacity sufficient to keep expectedSize elements without being resized. * diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 21c1e9b0470b5..b14d24cf95f62 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -125,7 +125,7 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -138,7 +138,7 @@ public void sendExecuteQuery( DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(response.results); + responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); assertEquals("dfs_query", phase.getName()); @@ -211,7 +211,7 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); 
mockSearchPhaseContext.searchTransport = searchTransportService; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -224,7 +224,7 @@ public void sendExecuteQuery( DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(response.results); + responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); assertEquals("dfs_query", phase.getName()); @@ -299,7 +299,7 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -312,7 +312,7 @@ public void sendExecuteQuery( DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(response.results); + responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); assertEquals("dfs_query", phase.getName()); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 3d66c4bc2793f..24b2610c8d190 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; @@ -49,7 +50,7 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testShortcutQueryAndFetchOptimization() { SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -139,7 +140,7 @@ private void assertProfiles(boolean profiled, int totalShards, SearchResponse se public void testFetchTwoDocument() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -256,7 +257,7 @@ public void run() { public void testFailFetchOneDoc() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = 
controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -384,7 +385,7 @@ public void testFetchDocsConcurrently() throws InterruptedException { boolean profiled = randomBoolean(); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -495,7 +496,7 @@ public void run() { public void testExceptionFailsPhase() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -606,7 +607,7 @@ public void run() { public void testCleanupIrrelevantContexts() { // contexts that are not fetched should be cleaned up MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java 
index 0dcb6abe3a86e..cd86a2e4f55d6 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -609,7 +609,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test"))); request.setBatchedReduceSize(bufferSize); - ArraySearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -734,7 +734,7 @@ public void testConsumerConcurrently() throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test"))); request.setBatchedReduceSize(bufferSize); - ArraySearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -807,7 +807,7 @@ public void testConsumerOnlyAggs() throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test")).size(0)); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -869,7 +869,7 @@ public void testConsumerOnlyHits() throws Exception { request.source(new SearchSourceBuilder().size(randomIntBetween(1, 10))); } request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + 
SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -936,7 +936,7 @@ public void testReduceTopNWithFromOffset() throws Exception { SearchRequest request = new SearchRequest(); request.source(new SearchSourceBuilder().size(5).from(5)); request.setBatchedReduceSize(randomIntBetween(2, 4)); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -994,7 +994,7 @@ public void testConsumerSortByField() throws Exception { SearchRequest request = randomSearchRequest(); int size = randomIntBetween(1, 10); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1050,7 +1050,7 @@ public void testConsumerFieldCollapsing() throws Exception { SearchRequest request = randomSearchRequest(); int size = randomIntBetween(5, 10); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1109,7 +1109,7 @@ public void testConsumerSuggestions() throws Exception { int bufferSize = randomIntBetween(2, 200); SearchRequest request = randomSearchRequest(); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1256,7 +1256,7 @@ public void 
onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(numReduceListener.incrementAndGet(), reducePhase); } }; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1347,7 +1347,7 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t if (shouldFailPartial) { circuitBreaker.shouldBreak.set(true); } - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, circuitBreaker, () -> false, @@ -1419,7 +1419,7 @@ public void testFailConsumeAggs() throws Exception { request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo")).size(0)); request.setBatchedReduceSize(bufferSize); AtomicBoolean hasConsumedFailure = new AtomicBoolean(); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, From 7273ef69a6b67cc8adc12b44fdb6804ce843ebdf Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 4 Dec 2023 23:52:07 +0100 Subject: [PATCH 136/181] [Enterprise Search] Add update ingestion stats endpoint (#102926) Add update connector sync job ingestion stats endpoint. 
--- .../api/connector_sync_job.update_stats.json | 39 +++ .../460_connector_sync_job_update_stats.yml | 160 ++++++++++++ .../xpack/application/EnterpriseSearch.java | 12 +- .../connector/syncjob/ConnectorSyncJob.java | 10 +- .../syncjob/ConnectorSyncJobIndexService.java | 55 ++++ ...eConnectorSyncJobIngestionStatsAction.java | 52 ++++ ...eConnectorSyncJobIngestionStatsAction.java | 53 ++++ ...eConnectorSyncJobIngestionStatsAction.java | 242 ++++++++++++++++++ .../ConnectorSyncJobIndexServiceTests.java | 131 ++++++++++ .../syncjob/ConnectorSyncJobTestUtils.java | 32 +++ ...ectorSyncJobIngestionStatsActionTests.java | 81 ++++++ ...StatsActionRequestBWCSerializingTests.java | 61 +++++ ...ncJobIngestionStatsActionRequestTests.java | 109 ++++++++ .../xpack/security/operator/Constants.java | 1 + 14 files changed, 1031 insertions(+), 7 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java 
create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json new file mode 100644 index 0000000000000..52f5a55cc8458 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json @@ -0,0 +1,39 @@ +{ + "connector_sync_job.update_stats": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the stats fields in the connector sync job document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}/_stats", + "methods": [ + "PUT" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "The stats to update for the connector sync job.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml new file mode 100644 index 0000000000000..0e69866ce8b6c --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml @@ -0,0 +1,160 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +"Update the ingestion stats for a connector sync job - only mandatory parameters": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + + - match: { acknowledged: true } + + +--- +"Update the ingestion stats for a connector sync job - negative deleted document count error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: -10 + indexed_document_count: 20 + indexed_document_volume: 1000 + catch: bad_request + + +--- +"Update the ingestion stats for a connector sync job - negative indexed document count error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + 
connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: -20 + indexed_document_volume: 1000 + catch: bad_request + + +--- +"Update the ingestion stats for a connector sync job - negative indexed document volume error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: -1000 + catch: bad_request + +--- +"Update the ingestion stats for a connector sync job - negative optional total document count error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + total_document_count: -10 + catch: bad_request + +--- +"Update the ingestion stats for a connector sync job - with optional total_document_count": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + total_document_count: 20 + + - match: { acknowledged: true } + + +--- +"Update the ingestion stats for a connector sync job - with optional last_seen": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + last_seen: 2023-12-04T08:45:50.567149Z + + - match: { acknowledged: true } + 
+--- +"Update the ingestion stats for a Connector Sync Job - Connector Sync Job does not exist": + - do: + connector_sync_job.update_stats: + connector_sync_job_id: test-nonexistent-connector-sync-job-id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 1a8ae73c41935..c4dbee214f37a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -85,11 +85,14 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobIngestionStatsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobIngestionStatsAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import 
org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; import org.elasticsearch.xpack.application.rules.RuleQueryBuilder; @@ -223,7 +226,11 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class), new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), - new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class) + new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class), + new ActionHandler<>( + UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, + TransportUpdateConnectorSyncJobIngestionStatsAction.class + ) ) ); } @@ -292,7 +299,8 @@ public List getRestHandlers( new RestPostConnectorSyncJobAction(), new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), - new RestCheckInConnectorSyncJobAction() + new RestCheckInConnectorSyncJobAction(), + new RestUpdateConnectorSyncJobIngestionStatsAction() ) ); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 0c6caa3376c7b..0781bb515fe93 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -75,19 +75,19 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField CREATED_AT_FIELD = new ParseField("created_at"); - static final ParseField 
DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); + public static final ParseField DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); static final ParseField ERROR_FIELD = new ParseField("error"); public static final ParseField ID_FIELD = new ParseField("id"); - static final ParseField INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("indexed_document_count"); + public static final ParseField INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("indexed_document_count"); - static final ParseField INDEXED_DOCUMENT_VOLUME_FIELD = new ParseField("indexed_document_volume"); + public static final ParseField INDEXED_DOCUMENT_VOLUME_FIELD = new ParseField("indexed_document_volume"); public static final ParseField JOB_TYPE_FIELD = new ParseField("job_type"); - static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); + public static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); static final ParseField METADATA_FIELD = new ParseField("metadata"); @@ -95,7 +95,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField STATUS_FIELD = new ParseField("status"); - static final ParseField TOTAL_DOCUMENT_COUNT_FIELD = new ParseField("total_document_count"); + public static final ParseField TOTAL_DOCUMENT_COUNT_FIELD = new ParseField("total_document_count"); public static final ParseField TRIGGER_METHOD_FIELD = new ParseField("trigger_method"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 6a7aec2fc7430..f105e6ece72aa 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -36,9 +36,11 @@ import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import java.io.IOException; import java.time.Instant; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -249,6 +251,59 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener listener + ) { + String syncJobId = request.getConnectorSyncJobId(); + + Map fieldsToUpdate = new HashMap<>( + Map.of( + ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), + request.getDeletedDocumentCount(), + ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), + request.getIndexedDocumentCount(), + ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), + request.getIndexedDocumentVolume() + ) + ); + + if (Objects.nonNull(request.getTotalDocumentCount())) { + fieldsToUpdate.put(ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), request.getTotalDocumentCount()); + } + // TODO: what to do, if no total document count is specified? Calculate it via the current count and params of the request? fetch + // the size of the target index? + + Instant lastSeen = Objects.nonNull(request.getLastSeen()) ? 
request.getLastSeen() : Instant.now(); + fieldsToUpdate.put(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, syncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ).doc(fieldsToUpdate); + + try { + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(syncJobId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(syncJobId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + + } + private String generateId() { /* Workaround: only needed for generating an id upfront, autoGenerateId() has a side effect generating a timestamp, * which would raise an error on the response layer later ("autoGeneratedTimestamp should not be set externally"). diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java new file mode 100644 index 0000000000000..aedd1605b8bfb --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; + +public class RestUpdateConnectorSyncJobIngestionStatsAction extends BaseRestHandler { + @Override + public String getName() { + return "connector_sync_job_update_ingestion_stats"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.PUT, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}/_stats" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + UpdateConnectorSyncJobIngestionStatsAction.Request request = UpdateConnectorSyncJobIngestionStatsAction.Request.fromXContentBytes( + restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM), + restRequest.content(), + restRequest.getXContentType() + ); + + return channel -> client.execute( + UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, + request, + new RestToXContentListener<>(channel, r -> RestStatus.OK) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java new file mode 100644 index 0000000000000..864da6ca3095b --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportUpdateConnectorSyncJobIngestionStatsAction extends HandledTransportAction< + UpdateConnectorSyncJobIngestionStatsAction.Request, + AcknowledgedResponse> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportUpdateConnectorSyncJobIngestionStatsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorSyncJobIngestionStatsAction.NAME, + transportService, + actionFilters, + UpdateConnectorSyncJobIngestionStatsAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorSyncJobIngestionStatsAction.Request 
request, + ActionListener listener + ) { + connectorSyncJobIndexService.updateConnectorSyncJobIngestionStats(request, listener.map(r -> AcknowledgedResponse.TRUE)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java new file mode 100644 index 0000000000000..34d8be2af4881 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java @@ -0,0 +1,242 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import 
org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; + +import java.io.IOException; +import java.time.Instant; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; + +public class UpdateConnectorSyncJobIngestionStatsAction extends ActionType { + + public static final UpdateConnectorSyncJobIngestionStatsAction INSTANCE = new UpdateConnectorSyncJobIngestionStatsAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_stats"; + + public UpdateConnectorSyncJobIngestionStatsAction() { + super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends ActionRequest implements ToXContentObject { + public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); + public static final String DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE = "[deleted_document_count] cannot be negative."; + public static final String INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE = "[indexed_document_count] cannot be negative."; + public static final String INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE = "[indexed_document_volume] cannot be negative."; + public static final String TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE = "[total_document_count] cannot be negative."; + + private final String connectorSyncJobId; + private final Long deletedDocumentCount; + private final Long indexedDocumentCount; + private final Long indexedDocumentVolume; + private final Long totalDocumentCount; + private final Instant lastSeen; + + public Request(StreamInput in) throws IOException { + 
super(in); + this.connectorSyncJobId = in.readString(); + this.deletedDocumentCount = in.readLong(); + this.indexedDocumentCount = in.readLong(); + this.indexedDocumentVolume = in.readLong(); + this.totalDocumentCount = in.readOptionalLong(); + this.lastSeen = in.readOptionalInstant(); + } + + public Request( + String connectorSyncJobId, + Long deletedDocumentCount, + Long indexedDocumentCount, + Long indexedDocumentVolume, + Long totalDocumentCount, + Instant lastSeen + ) { + this.connectorSyncJobId = connectorSyncJobId; + this.deletedDocumentCount = deletedDocumentCount; + this.indexedDocumentCount = indexedDocumentCount; + this.indexedDocumentVolume = indexedDocumentVolume; + this.totalDocumentCount = totalDocumentCount; + this.lastSeen = lastSeen; + } + + public static UpdateConnectorSyncJobIngestionStatsAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + public Long getDeletedDocumentCount() { + return deletedDocumentCount; + } + + public Long getIndexedDocumentCount() { + return indexedDocumentCount; + } + + public Long getIndexedDocumentVolume() { + return indexedDocumentVolume; + } + + public Long getTotalDocumentCount() { + return totalDocumentCount; + } + + public Instant getLastSeen() { + return lastSeen; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, validationException); + } + + if (deletedDocumentCount < 0L) { + validationException = addValidationError(DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE, validationException); + } + + if (indexedDocumentCount < 0L) { + validationException = addValidationError(INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE, validationException); + } + + if (indexedDocumentVolume < 0L) { + 
validationException = addValidationError(INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE, validationException); + } + + if (Objects.nonNull(totalDocumentCount) && totalDocumentCount < 0L) { + validationException = addValidationError(TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE, validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("connector_sync_job_update_ingestion_stats", false, (args, connectorSyncJobId) -> { + Long deletedDocumentCount = (Long) args[0]; + Long indexedDocumentCount = (Long) args[1]; + Long indexedDocumentVolume = (Long) args[2]; + + Long totalDocumentVolume = args[3] != null ? (Long) args[3] : null; + Instant lastSeen = args[4] != null ? (Instant) args[4] : null; + + return new Request( + connectorSyncJobId, + deletedDocumentCount, + indexedDocumentCount, + indexedDocumentVolume, + totalDocumentVolume, + lastSeen + ); + }); + + static { + PARSER.declareLong(constructorArg(), ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareLong(constructorArg(), ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareLong(constructorArg(), ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + ConnectorSyncJob.LAST_SEEN_FIELD, + ObjectParser.ValueType.OBJECT_OR_STRING + ); + } + + public static UpdateConnectorSyncJobIngestionStatsAction.Request fromXContentBytes( + String connectorSyncJobId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorSyncJobIngestionStatsAction.Request.fromXContent(parser, connectorSyncJobId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + 
source.utf8ToString()); + } + } + + public static Request fromXContent(XContentParser parser, String connectorSyncJobId) throws IOException { + return PARSER.parse(parser, connectorSyncJobId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), deletedDocumentCount); + builder.field(ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), indexedDocumentCount); + builder.field(ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), indexedDocumentVolume); + builder.field(ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), totalDocumentCount); + builder.field(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + out.writeLong(deletedDocumentCount); + out.writeLong(indexedDocumentCount); + out.writeLong(indexedDocumentVolume); + out.writeOptionalLong(totalDocumentCount); + out.writeOptionalInstant(lastSeen); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId) + && Objects.equals(deletedDocumentCount, request.deletedDocumentCount) + && Objects.equals(indexedDocumentCount, request.indexedDocumentCount) + && Objects.equals(indexedDocumentVolume, request.indexedDocumentVolume) + && Objects.equals(totalDocumentCount, request.totalDocumentCount) + && Objects.equals(lastSeen, request.lastSeen); + } + + @Override + public int hashCode() { + return Objects.hash( + connectorSyncJobId, + deletedDocumentCount, + indexedDocumentCount, + indexedDocumentVolume, + totalDocumentCount, + 
lastSeen + ); + } + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 8613078e3074e..2dcf43c6f3f22 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.junit.Before; import java.time.Instant; @@ -252,6 +253,136 @@ public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testUpdateConnectorSyncJobIngestionStats() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(syncJobId); + UpdateResponse updateResponse = awaitUpdateConnectorSyncJobIngestionStats(request); + Map syncJobSourceAfterUpdate = 
getConnectorSyncJobSourceById(syncJobId); + + Long requestDeletedDocumentCount = request.getDeletedDocumentCount(); + Long requestIndexedDocumentCount = request.getIndexedDocumentCount(); + Long requestIndexedDocumentVolume = request.getIndexedDocumentVolume(); + Long requestTotalDocumentCount = request.getTotalDocumentCount(); + Instant requestLastSeen = request.getLastSeen(); + + Long deletedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName() + ); + Long indexedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName() + ); + Long indexedDocumentVolumeAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName() + ); + Long totalDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName() + ); + Instant lastSeenAfterUpdate = Instant.parse( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) + ); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertThat(deletedDocumentCountAfterUpdate, equalTo(requestDeletedDocumentCount)); + assertThat(indexedDocumentCountAfterUpdate, equalTo(requestIndexedDocumentCount)); + assertThat(indexedDocumentVolumeAfterUpdate, equalTo(requestIndexedDocumentVolume)); + assertThat(totalDocumentCountAfterUpdate, equalTo(requestTotalDocumentCount)); + assertThat(lastSeenAfterUpdate, equalTo(requestLastSeen)); + assertFieldsExceptAllIngestionStatsDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testUpdateConnectorSyncJobIngestionStats_WithoutLastSeen_ExpectUpdateOfLastSeen() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + 
PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + Instant lastSeenBeforeUpdate = Instant.parse( + (String) syncJobSourceBeforeUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) + ); + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + syncJobId, + 10L, + 20L, + 100L, + 10L, + null + ); + + safeSleep(ONE_SECOND_IN_MILLIS); + + UpdateResponse updateResponse = awaitUpdateConnectorSyncJobIngestionStats(request); + Map syncJobSourceAfterUpdate = getConnectorSyncJobSourceById(syncJobId); + Instant lastSeenAfterUpdate = Instant.parse( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) + ); + long secondsBetweenLastSeenBeforeAndAfterUpdate = ChronoUnit.SECONDS.between(lastSeenBeforeUpdate, lastSeenAfterUpdate); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertTrue(lastSeenAfterUpdate.isAfter(lastSeenBeforeUpdate)); + assertThat(secondsBetweenLastSeenBeforeAndAfterUpdate, greaterThanOrEqualTo(1L)); + assertFieldsExceptAllIngestionStatsDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testUpdateConnectorSyncJobIngestionStats_WithMissingSyncJobId_ExpectException() { + expectThrows( + ResourceNotFoundException.class, + () -> awaitUpdateConnectorSyncJobIngestionStats( + new UpdateConnectorSyncJobIngestionStatsAction.Request(NON_EXISTING_SYNC_JOB_ID, 0L, 0L, 0L, 0L, Instant.now()) + ) + ); + } + + private UpdateResponse awaitUpdateConnectorSyncJobIngestionStats(UpdateConnectorSyncJobIngestionStatsAction.Request request) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + 
connectorSyncJobIndexService.updateConnectorSyncJobIngestionStats(request, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse updateResponse) { + resp.set(updateResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update request", resp.get()); + return resp.get(); + } + + private static void assertFieldsExceptAllIngestionStatsDidNotUpdate( + Map syncJobSourceBeforeUpdate, + Map syncJobSourceAfterUpdate + ) { + assertFieldsDidNotUpdateExceptFieldList( + syncJobSourceBeforeUpdate, + syncJobSourceAfterUpdate, + List.of( + ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD, + ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD, + ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD, + ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD, + ConnectorSyncJob.LAST_SEEN_FIELD + ) + ); + } + private static void assertFieldsExceptSyncStatusAndCancellationRequestedAtDidNotUpdate( Map syncJobSourceBeforeUpdate, Map syncJobSourceAfterUpdate diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 9ec404e109496..8170391094356 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; 
import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import java.time.Instant; @@ -24,6 +25,7 @@ import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomLong; import static org.elasticsearch.test.ESTestCase.randomMap; +import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong; public class ConnectorSyncJobTestUtils { @@ -102,6 +104,36 @@ public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyn return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } + public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdateConnectorSyncJobIngestionStatsActionRequest() { + Instant lowerBoundInstant = Instant.ofEpochSecond(0L); + Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); + + return new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomInstantBetween(lowerBoundInstant, upperBoundInstant) + ); + } + + public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdateConnectorSyncJobIngestionStatsActionRequest( + String syncJobId + ) { + Instant lowerBoundInstant = Instant.ofEpochSecond(0L); + Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); + + return new UpdateConnectorSyncJobIngestionStatsAction.Request( + syncJobId, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomInstantBetween(lowerBoundInstant, upperBoundInstant) + ); + } + public static GetConnectorSyncJobAction.Request getRandomGetConnectorSyncJobRequest() { return new GetConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java new file mode 100644 index 0000000000000..625c2e6d96cda --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportUpdateConnectorSyncJobIngestionStatsActionTests extends ESSingleNodeTestCase { + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private 
TransportUpdateConnectorSyncJobIngestionStatsAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportUpdateConnectorSyncJobIngestionStatsAction( + transportService, + clusterService, + mock(ActionFilters.class), + client() + ); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testUpdateConnectorSyncJobIngestionStatsAction_ExpectNoWarnings() throws InterruptedException { + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(UpdateConnectorSyncJobIngestionStatsAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for update request", requestTimedOut); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..6e2178d8341cf --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorSyncJobIngestionStatsAction.Request> { + + public String connectorSyncJobId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorSyncJobIngestionStatsAction.Request::new; + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request createTestInstance() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(); + connectorSyncJobId = request.getConnectorSyncJobId(); + return request; + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request mutateInstance(UpdateConnectorSyncJobIngestionStatsAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request doParseInstance(XContentParser parser) throws IOException { + return 
UpdateConnectorSyncJobIngestionStatsAction.Request.fromXContent(parser, connectorSyncJobId); + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request mutateInstanceForVersion( + UpdateConnectorSyncJobIngestionStatsAction.Request instance, + TransportVersion version + ) { + return new UpdateConnectorSyncJobIngestionStatsAction.Request( + instance.getConnectorSyncJobId(), + instance.getDeletedDocumentCount(), + instance.getIndexedDocumentCount(), + instance.getIndexedDocumentVolume(), + instance.getTotalDocumentCount(), + instance.getLastSeen() + ); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java new file mode 100644 index 0000000000000..48ab14558db7e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import java.time.Instant; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class UpdateConnectorSyncJobIngestionStatsActionRequestTests extends ESTestCase { + + public void testValidate_WhenRequestIsValid_ExpectNoValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + "", + 0L, + 0L, + 0L, + 0L, + Instant.now() + ); + 
ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + + public void testValidate_WhenDeletedDocumentCountIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + -10L, + 0L, + 0L, + 0L, + Instant.now() + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE)); + } + + public void testValidate_WhenIndexedDocumentCountIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + 0L, + -10L, + 0L, + 0L, + Instant.now() + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE)); + } + + public void testValidate_WhenIndexedDocumentVolumeIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + 0L, + 0L, + -10L, + 0L, + Instant.now() + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE)); + } + + public void testValidate_WhenTotalDocumentCountIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + 0L, + 0L, + 0L, + -10L, + Instant.now() + ); + 
ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE)); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 44399e84ee6e0..11e293d8675f7 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -139,6 +139,7 @@ public class Constants { "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/sync_job/get", "cluster:admin/xpack/connector/sync_job/cancel", + "cluster:admin/xpack/connector/sync_job/update_stats", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", "cluster:admin/xpack/enrich/delete", From ad735e699ca4e8af23c5ebd4b921f530eadf9cb0 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 4 Dec 2023 16:15:50 -0800 Subject: [PATCH 137/181] Mute failing test --- .../resources/rest-api-spec/test/ml/3rd_party_deployment.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml index 2b9bc06bdd1d0..af3ecd2637843 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml @@ -218,7 +218,9 @@ setup: --- "Test start deployment fails while model download in progress": - + - skip: + 
version: "all" + reason: "Awaits fix: https://github.com/elastic/elasticsearch/issues/102948" - do: ml.put_trained_model: model_id: .elser_model_2 From 72afbc19566f6ff4c88dffb52c2479ddddf168fd Mon Sep 17 00:00:00 2001 From: Chenhui Wang <54903978+wangch079@users.noreply.github.com> Date: Tue, 5 Dec 2023 10:21:46 +0800 Subject: [PATCH 138/181] Add read pivilege on connectors indices for user kibana_system (#102770) --- .../authz/store/KibanaOwnedReservedRoleDescriptors.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 6a1da2e0ddfa0..f11f5c450b270 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -365,7 +365,9 @@ static RoleDescriptor kibanaSystem(String name) { // Kibana system user uses them to read / write slo data. RoleDescriptor.IndicesPrivileges.builder().indices(".slo-observability.*").privileges("all").build(), // Endpoint heartbeat. Kibana reads from these to determine metering/billing for endpoints. - RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read").build() }, + RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read").build(), + // For connectors telemetry. 
Will be removed once we switched to connectors API + RoleDescriptor.IndicesPrivileges.builder().indices(".elastic-connectors*").privileges("read").build() }, null, new ConfigurableClusterPrivilege[] { new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Set.of("kibana-*")), From 85311b20020ba424428c90a0aa457645287e8434 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Tue, 5 Dec 2023 07:03:59 +0100 Subject: [PATCH 139/181] [Profiling] Fix CO2 calculation with user-provided PUE and CO2PerKWH (#102884) Co-authored-by: Elastic Machine --- .../elasticsearch/xpack/profiling/CO2Calculator.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index 136821d491c59..0d92bf0a78d09 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.profiling; -import java.util.Collections; import java.util.Map; import static java.util.Map.entry; @@ -19,7 +18,6 @@ final class CO2Calculator { private static final double DEFAULT_KILOWATTS_PER_CORE_ARM64 = 2.8d / 1000.0d; // unit: watt / core private static final double DEFAULT_KILOWATTS_PER_CORE = DEFAULT_KILOWATTS_PER_CORE_X86; // unit: watt / core private static final double DEFAULT_DATACENTER_PUE = 1.7d; - private static final Provider DEFAULT_PROVIDER = new Provider(DEFAULT_DATACENTER_PUE, Collections.emptyMap()); private final InstanceTypeService instanceTypeService; private final Map hostMetadata; private final double samplingDurationInSeconds; @@ -76,12 +74,13 @@ private double getKiloWattsPerCore(HostMetadata host) { } private double getCO2TonsPerKWH(HostMetadata host) { - Provider provider = 
PROVIDERS.getOrDefault(host.instanceType.provider, DEFAULT_PROVIDER); - return provider.co2TonsPerKWH.getOrDefault(host.instanceType.region, customCO2PerKWH); + Provider provider = PROVIDERS.get(host.instanceType.provider); + return provider == null ? customCO2PerKWH : provider.co2TonsPerKWH.getOrDefault(host.instanceType.region, customCO2PerKWH); } - private static double getDatacenterPUE(HostMetadata host) { - return PROVIDERS.getOrDefault(host.instanceType.provider, DEFAULT_PROVIDER).pue; + private double getDatacenterPUE(HostMetadata host) { + Provider provider = PROVIDERS.get(host.instanceType.provider); + return provider == null ? customDatacenterPUE : provider.pue; } private record Provider(double pue, Map co2TonsPerKWH) {} From be98a4697e2719dbcf9a542cac377ed37b9cdb6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Tue, 5 Dec 2023 07:53:49 +0100 Subject: [PATCH 140/181] [LTR] Update the feature name from "learn to rank" to "learning to rank". (#102938) --- .../cluster/stats/SearchUsageStatsTests.java | 6 +- .../test/cluster/FeatureFlag.java | 2 +- ...kConfig.java => LearningToRankConfig.java} | 72 ++++----- ...earningToRankFeatureExtractorBuilder.java} | 4 +- .../ltr/QueryExtractorBuilder.java | 2 +- .../ml/ltr/MlLTRNamedXContentProvider.java | 18 +-- ...ts.java => LearningToRankConfigTests.java} | 40 ++--- .../ml/qa/basic-multi-node/build.gradle | 2 +- ...T.java => MlLearningToRankRescorerIT.java} | 18 +-- .../ml/qa/ml-with-security/build.gradle | 4 +- ...rIT.java => LearningToRankRescorerIT.java} | 20 +-- .../xpack/ml/MachineLearning.java | 24 +-- .../loadingservice/ModelLoadingService.java | 2 +- ...corer.java => LearningToRankRescorer.java} | 12 +- ...ava => LearningToRankRescorerBuilder.java} | 146 ++++++++++-------- ...ava => LearningToRankRescorerContext.java} | 24 +-- ...ava => LearningToRankRescorerFeature.java} | 12 +- ...ervice.java => LearningToRankService.java} | 38 ++--- ...ingToRankRescorerBuilderRewriteTests.java} | 
93 +++++------ ...ankRescorerBuilderSerializationTests.java} | 63 ++++---- ...s.java => LearningToRankServiceTests.java} | 66 ++++---- .../xpack/test/rest/XPackRestIT.java | 2 +- ...orer.yml => learning_to_rank_rescorer.yml} | 14 +- 23 files changed, 351 insertions(+), 333 deletions(-) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/{LearnToRankConfig.java => LearningToRankConfig.java} (71%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/{LearnToRankFeatureExtractorBuilder.java => LearningToRankFeatureExtractorBuilder.java} (88%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/{LearnToRankConfigTests.java => LearningToRankConfigTests.java} (84%) rename x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/{MlRescorerIT.java => MlLearningToRankRescorerIT.java} (97%) rename x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/{LearnToRankRescorerIT.java => LearningToRankRescorerIT.java} (94%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorer.java => LearningToRankRescorer.java} (94%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerBuilder.java => LearningToRankRescorerBuilder.java} (57%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerContext.java => LearningToRankRescorerContext.java} (81%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerFeature.java => LearningToRankRescorerFeature.java} (57%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankService.java => LearningToRankService.java} (86%) rename 
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerBuilderRewriteTests.java => LearningToRankRescorerBuilderRewriteTests.java} (66%) rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerBuilderSerializationTests.java => LearningToRankRescorerBuilderSerializationTests.java} (71%) rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankServiceTests.java => LearningToRankServiceTests.java} (77%) rename x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/{learn_to_rank_rescorer.yml => learning_to_rank_rescorer.yml} (94%) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java index cc4509500f9c1..a5704748ea242 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java @@ -34,7 +34,7 @@ public class SearchUsageStatsTests extends AbstractWireSerializingTestCase RESCORER_TYPES = List.of("query", "learn_to_rank"); + private static final List RESCORER_TYPES = List.of("query", "learning_to_rank"); private static final List SECTIONS = List.of( "highlight", @@ -136,14 +136,14 @@ public void testAdd() { searchUsageStats.add( new SearchUsageStats( Map.of("term", 1L, "match", 1L), - Map.of("query", 5L, "learn_to_rank", 2L), + Map.of("query", 5L, "learning_to_rank", 2L), Map.of("query", 10L, "knn", 1L), 10L ) ); assertEquals(Map.of("match", 11L, "term", 1L), searchUsageStats.getQueryUsage()); assertEquals(Map.of("query", 20L, "knn", 1L), searchUsageStats.getSectionsUsage()); - assertEquals(Map.of("query", 10L, "learn_to_rank", 2L), searchUsageStats.getRescorerUsage()); + assertEquals(Map.of("query", 10L, "learning_to_rank", 2L), 
searchUsageStats.getRescorerUsage()); assertEquals(20L, searchUsageStats.getTotalSearchCount()); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index ff7195f9f5f37..2c313da69b42e 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,7 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - LEARN_TO_RANK("es.learn_to_rank_feature_flag_enabled=true", Version.fromString("8.10.0"), null), + LEARNING_TO_RANK("es.learning_to_rank_feature_flag_enabled=true", Version.fromString("8.12.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); public final String systemProperty; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java similarity index 71% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java index ba617ca8d04b8..7a51fb9a0fce3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java @@ -17,7 +17,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.MlConfigVersion; -import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.utils.NamedXContentObjectHelper; @@ -30,29 +30,29 @@ import java.util.Set; import java.util.stream.Collectors; -public class LearnToRankConfig extends RegressionConfig implements Rewriteable { +public class LearningToRankConfig extends RegressionConfig implements Rewriteable { - public static final ParseField NAME = new ParseField("learn_to_rank"); + public static final ParseField NAME = new ParseField("learning_to_rank"); static final TransportVersion MIN_SUPPORTED_TRANSPORT_VERSION = TransportVersion.current(); public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); public static final ParseField FEATURE_EXTRACTORS = new ParseField("feature_extractors"); public static final ParseField DEFAULT_PARAMS = new ParseField("default_params"); - public static LearnToRankConfig EMPTY_PARAMS = new LearnToRankConfig(null, null, null); + public static LearningToRankConfig EMPTY_PARAMS = new LearningToRankConfig(null, null, null); - private static final ObjectParser LENIENT_PARSER = createParser(true); - private static final ObjectParser STRICT_PARSER = createParser(false); + private static final ObjectParser LENIENT_PARSER = createParser(true); + private static final ObjectParser STRICT_PARSER = createParser(false); - private static ObjectParser createParser(boolean lenient) { - ObjectParser parser = new ObjectParser<>( + private static ObjectParser createParser(boolean lenient) { + ObjectParser parser = new ObjectParser<>( NAME.getPreferredName(), lenient, - LearnToRankConfig.Builder::new + LearningToRankConfig.Builder::new ); parser.declareInt(Builder::setNumTopFeatureImportanceValues, 
NUM_TOP_FEATURE_IMPORTANCE_VALUES); parser.declareNamedObjects( - Builder::setLearnToRankFeatureExtractorBuilders, - (p, c, n) -> p.namedObject(LearnToRankFeatureExtractorBuilder.class, n, lenient), + Builder::setLearningToRankFeatureExtractorBuilders, + (p, c, n) -> p.namedObject(LearningToRankFeatureExtractorBuilder.class, n, lenient), b -> {}, FEATURE_EXTRACTORS ); @@ -60,30 +60,30 @@ private static ObjectParser createParser(boo return parser; } - public static LearnToRankConfig fromXContentStrict(XContentParser parser) { + public static LearningToRankConfig fromXContentStrict(XContentParser parser) { return STRICT_PARSER.apply(parser, null).build(); } - public static LearnToRankConfig fromXContentLenient(XContentParser parser) { + public static LearningToRankConfig fromXContentLenient(XContentParser parser) { return LENIENT_PARSER.apply(parser, null).build(); } - public static Builder builder(LearnToRankConfig config) { + public static Builder builder(LearningToRankConfig config) { return new Builder(config); } - private final List featureExtractorBuilders; + private final List featureExtractorBuilders; private final Map paramsDefaults; - public LearnToRankConfig( + public LearningToRankConfig( Integer numTopFeatureImportanceValues, - List featureExtractorBuilders, + List featureExtractorBuilders, Map paramsDefaults ) { super(DEFAULT_RESULTS_FIELD, numTopFeatureImportanceValues); if (featureExtractorBuilders != null) { Set featureNames = featureExtractorBuilders.stream() - .map(LearnToRankFeatureExtractorBuilder::featureName) + .map(LearningToRankFeatureExtractorBuilder::featureName) .collect(Collectors.toSet()); if (featureNames.size() < featureExtractorBuilders.size()) { throw new IllegalArgumentException( @@ -95,19 +95,19 @@ public LearnToRankConfig( this.paramsDefaults = Collections.unmodifiableMap(Objects.requireNonNullElse(paramsDefaults, Map.of())); } - public LearnToRankConfig(StreamInput in) throws IOException { + public 
LearningToRankConfig(StreamInput in) throws IOException { super(in); - this.featureExtractorBuilders = in.readNamedWriteableCollectionAsList(LearnToRankFeatureExtractorBuilder.class); + this.featureExtractorBuilders = in.readNamedWriteableCollectionAsList(LearningToRankFeatureExtractorBuilder.class); this.paramsDefaults = in.readMap(); } - public List getFeatureExtractorBuilders() { + public List getFeatureExtractorBuilders() { return featureExtractorBuilders; } public List getQueryFeatureExtractorBuilders() { List queryExtractorBuilders = new ArrayList<>(); - for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : featureExtractorBuilders) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : featureExtractorBuilders) { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { queryExtractorBuilders.add(queryExtractorBuilder); } @@ -189,7 +189,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - LearnToRankConfig that = (LearnToRankConfig) o; + LearningToRankConfig that = (LearningToRankConfig) o; return Objects.equals(featureExtractorBuilders, that.featureExtractorBuilders) && Objects.equals(paramsDefaults, that.paramsDefaults); } @@ -220,33 +220,33 @@ public TransportVersion getMinimalSupportedTransportVersion() { } @Override - public LearnToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { + public LearningToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { if (this.featureExtractorBuilders.isEmpty()) { return this; } boolean rewritten = false; - List rewrittenExtractors = new ArrayList<>(this.featureExtractorBuilders.size()); - for (LearnToRankFeatureExtractorBuilder extractorBuilder : this.featureExtractorBuilders) { - LearnToRankFeatureExtractorBuilder rewrittenExtractor = Rewriteable.rewrite(extractorBuilder, ctx); + List rewrittenExtractors = new 
ArrayList<>(this.featureExtractorBuilders.size()); + for (LearningToRankFeatureExtractorBuilder extractorBuilder : this.featureExtractorBuilders) { + LearningToRankFeatureExtractorBuilder rewrittenExtractor = Rewriteable.rewrite(extractorBuilder, ctx); rewrittenExtractors.add(rewrittenExtractor); rewritten |= (rewrittenExtractor != extractorBuilder); } if (rewritten) { - return new LearnToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors, paramsDefaults); + return new LearningToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors, paramsDefaults); } return this; } public static class Builder { private Integer numTopFeatureImportanceValues; - private List learnToRankFeatureExtractorBuilders; + private List learningToRankFeatureExtractorBuilders; private Map paramsDefaults = Map.of(); Builder() {} - Builder(LearnToRankConfig config) { + Builder(LearningToRankConfig config) { this.numTopFeatureImportanceValues = config.getNumTopFeatureImportanceValues(); - this.learnToRankFeatureExtractorBuilders = config.featureExtractorBuilders; + this.learningToRankFeatureExtractorBuilders = config.featureExtractorBuilders; this.paramsDefaults = config.getParamsDefaults(); } @@ -255,10 +255,10 @@ public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceV return this; } - public Builder setLearnToRankFeatureExtractorBuilders( - List learnToRankFeatureExtractorBuilders + public Builder setLearningToRankFeatureExtractorBuilders( + List learningToRankFeatureExtractorBuilders ) { - this.learnToRankFeatureExtractorBuilders = learnToRankFeatureExtractorBuilders; + this.learningToRankFeatureExtractorBuilders = learningToRankFeatureExtractorBuilders; return this; } @@ -267,8 +267,8 @@ public Builder setParamsDefaults(Map paramsDefaults) { return this; } - public LearnToRankConfig build() { - return new LearnToRankConfig(numTopFeatureImportanceValues, learnToRankFeatureExtractorBuilders, paramsDefaults); + public LearningToRankConfig 
build() { + return new LearningToRankConfig(numTopFeatureImportanceValues, learningToRankFeatureExtractorBuilders, paramsDefaults); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearnToRankFeatureExtractorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearningToRankFeatureExtractorBuilder.java similarity index 88% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearnToRankFeatureExtractorBuilder.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearningToRankFeatureExtractorBuilder.java index 3eac7d0d0a245..a610756cda22e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearnToRankFeatureExtractorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearningToRankFeatureExtractorBuilder.java @@ -12,11 +12,11 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject; -public interface LearnToRankFeatureExtractorBuilder +public interface LearningToRankFeatureExtractorBuilder extends NamedXContentObject, NamedWriteable, - Rewriteable { + Rewriteable { ParseField FEATURE_NAME = new ParseField("feature_name"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java index a138fbbb98ba1..d9e90b92382e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java @@ -27,7 +27,7 @@ public 
record QueryExtractorBuilder(String featureName, QueryProvider query, float defaultScore) implements - LearnToRankFeatureExtractorBuilder { + LearningToRankFeatureExtractorBuilder { public static final ParseField NAME = new ParseField("query_extractor"); public static final ParseField FEATURE_NAME = new ParseField("feature_name"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java index c7a8db0ebf011..0f59d1183a632 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java @@ -10,10 +10,10 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedInferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedInferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import java.util.ArrayList; @@ -32,22 +32,22 @@ public List getNamedXContentParsers() { namedXContent.add( new NamedXContentRegistry.Entry( LenientlyParsedInferenceConfig.class, - LearnToRankConfig.NAME, - LearnToRankConfig::fromXContentLenient + LearningToRankConfig.NAME, + LearningToRankConfig::fromXContentLenient ) ); // 
Strict Inference Config namedXContent.add( new NamedXContentRegistry.Entry( StrictlyParsedInferenceConfig.class, - LearnToRankConfig.NAME, - LearnToRankConfig::fromXContentStrict + LearningToRankConfig.NAME, + LearningToRankConfig::fromXContentStrict ) ); // LTR extractors namedXContent.add( new NamedXContentRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, QueryExtractorBuilder.NAME, QueryExtractorBuilder::fromXContent ) @@ -59,12 +59,12 @@ public List getNamedWriteables() { List namedWriteables = new ArrayList<>(); // Inference config namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceConfig.class, LearnToRankConfig.NAME.getPreferredName(), LearnToRankConfig::new) + new NamedWriteableRegistry.Entry(InferenceConfig.class, LearningToRankConfig.NAME.getPreferredName(), LearningToRankConfig::new) ); // LTR Extractors namedWriteables.add( new NamedWriteableRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, QueryExtractorBuilder.NAME.getPreferredName(), QueryExtractorBuilder::new ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfigTests.java similarity index 84% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfigTests.java index 1059af21ab7eb..09d2366984383 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfigTests.java @@ -22,7 +22,7 @@ import 
org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.inference.InferenceConfigItemTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilderTests; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; import org.junit.Before; @@ -37,11 +37,11 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class LearnToRankConfigTests extends InferenceConfigItemTestCase { +public class LearningToRankConfigTests extends InferenceConfigItemTestCase { private boolean lenient; - public static LearnToRankConfig randomLearnToRankConfig() { - return new LearnToRankConfig( + public static LearningToRankConfig randomLearningToRankConfig() { + return new LearningToRankConfig( randomBoolean() ? null : randomIntBetween(0, 10), randomBoolean() ? 
null @@ -56,15 +56,15 @@ public void chooseStrictOrLenient() { } @Override - protected LearnToRankConfig createTestInstance() { - return randomLearnToRankConfig(); + protected LearningToRankConfig createTestInstance() { + return randomLearningToRankConfig(); } @Override - protected LearnToRankConfig mutateInstance(LearnToRankConfig instance) { + protected LearningToRankConfig mutateInstance(LearningToRankConfig instance) { int i = randomInt(2); - LearnToRankConfig.Builder builder = LearnToRankConfig.builder(instance); + LearningToRankConfig.Builder builder = LearningToRankConfig.builder(instance); switch (i) { case 0 -> { @@ -76,7 +76,7 @@ protected LearnToRankConfig mutateInstance(LearnToRankConfig instance) { ); } case 1 -> { - builder.setLearnToRankFeatureExtractorBuilders( + builder.setLearningToRankFeatureExtractorBuilders( randomValueOtherThan( instance.getFeatureExtractorBuilders(), () -> randomBoolean() || instance.getFeatureExtractorBuilders().isEmpty() @@ -109,13 +109,13 @@ protected Predicate getRandomFieldsExcludeFilter() { } @Override - protected Writeable.Reader instanceReader() { - return LearnToRankConfig::new; + protected Writeable.Reader instanceReader() { + return LearningToRankConfig::new; } @Override - protected LearnToRankConfig doParseInstance(XContentParser parser) throws IOException { - return lenient ? LearnToRankConfig.fromXContentLenient(parser) : LearnToRankConfig.fromXContentStrict(parser); + protected LearningToRankConfig doParseInstance(XContentParser parser) throws IOException { + return lenient ? 
LearningToRankConfig.fromXContentLenient(parser) : LearningToRankConfig.fromXContentStrict(parser); } @Override @@ -124,18 +124,18 @@ protected boolean supportsUnknownFields() { } @Override - protected LearnToRankConfig mutateInstanceForVersion(LearnToRankConfig instance, TransportVersion version) { + protected LearningToRankConfig mutateInstanceForVersion(LearningToRankConfig instance, TransportVersion version) { return instance; } public void testDuplicateFeatureNames() { - List featureExtractorBuilderList = List.of( + List featureExtractorBuilderList = List.of( new TestValueExtractor("foo"), new TestValueExtractor("foo") ); - LearnToRankConfig.Builder builder = LearnToRankConfig.builder(randomLearnToRankConfig()) - .setLearnToRankFeatureExtractorBuilders(featureExtractorBuilderList); + LearningToRankConfig.Builder builder = LearningToRankConfig.builder(randomLearningToRankConfig()) + .setLearningToRankFeatureExtractorBuilders(featureExtractorBuilderList); expectThrows(IllegalArgumentException.class, () -> builder.build()); } @@ -148,7 +148,7 @@ protected NamedXContentRegistry xContentRegistry() { namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); namedXContent.add( new NamedXContentRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, TestValueExtractor.NAME, TestValueExtractor::fromXContent ) @@ -163,7 +163,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); namedWriteables.add( new NamedWriteableRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, TestValueExtractor.NAME.getPreferredName(), TestValueExtractor::new ) @@ -171,7 +171,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry(namedWriteables); } - private static class TestValueExtractor implements 
LearnToRankFeatureExtractorBuilder { + private static class TestValueExtractor implements LearningToRankFeatureExtractorBuilder { public static final ParseField NAME = new ParseField("test"); private final String featureName; diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index bf6ab9ed7d77e..3f2f85e3e09da 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -17,7 +17,7 @@ testClusters.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' setting 'slm.history_index_enabled', 'false' - requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learning_to_rank_feature_flag_enabled', Version.fromString("8.12.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlRescorerIT.java b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java similarity index 97% rename from x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlRescorerIT.java rename to x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java index 6dbb0a46121aa..0dab4f9e4256c 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlRescorerIT.java +++ b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java @@ -21,7 +21,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -public class MlRescorerIT extends ESRestTestCase { +public class MlLearningToRankRescorerIT extends ESRestTestCase { private static 
final String MODEL_ID = "basic-ltr-model"; private static final String INDEX_NAME = "store"; @@ -33,7 +33,7 @@ public void setupModelAndData() throws IOException { "description": "super complex model for tests", "input": { "field_names": ["cost", "product"] }, "inference_config": { - "learn_to_rank": { + "learning_to_rank": { "feature_extractors": [ { "query_extractor": { @@ -206,7 +206,7 @@ public void testLtrSimple() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -225,7 +225,7 @@ public void testLtrSimpleDFS() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model", "params": { "keyword": "TV" } } @@ -239,7 +239,7 @@ public void testLtrSimpleDFS() throws Exception { { "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model", "params": { "keyword": "TV" } } @@ -263,7 +263,7 @@ public void testLtrSimpleEmpty() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -282,7 +282,7 @@ public void testLtrEmptyDFS() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -301,7 +301,7 @@ public void testLtrCanMatch() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -317,7 +317,7 @@ public void testLtrCanMatch() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index b8b706353d624..df2eb2c687fb5 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -181,7 
+181,7 @@ tasks.named("yamlRestTest").configure { 'ml/inference_crud/Test put nlp model config with vocabulary set', 'ml/inference_crud/Test put model model aliases with nlp model', 'ml/inference_processor/Test create processor with missing mandatory fields', - 'ml/learn_to_rank_rescorer/Test rescore with missing model', + 'ml/learning_to_rank_rescorer/Test rescore with missing model', 'ml/inference_stats_crud/Test get stats given missing trained model', 'ml/inference_stats_crud/Test get stats given expression without matches and allow_no_match is false', 'ml/jobs_crud/Test cannot create job with model snapshot id set', @@ -258,5 +258,5 @@ testClusters.configureEach { user username: "no_ml", password: "x-pack-test-password", role: "minimal" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' - requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learning_to_rank_feature_flag_enabled', Version.fromString("8.12.0") } diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearnToRankRescorerIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java similarity index 94% rename from x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearnToRankRescorerIT.java rename to x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java index d246f070f0b8d..0e060b3c94644 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearnToRankRescorerIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.equalTo; -public class 
LearnToRankRescorerIT extends InferenceTestCase { +public class LearningToRankRescorerIT extends InferenceTestCase { private static final String MODEL_ID = "ltr-model"; private static final String INDEX_NAME = "store"; @@ -30,7 +30,7 @@ public void setupModelAndData() throws IOException { "description": "super complex model for tests", "input": {"field_names": ["cost", "product"]}, "inference_config": { - "learn_to_rank": { + "learning_to_rank": { "feature_extractors": [ { "query_extractor": { @@ -196,13 +196,13 @@ public void setupModelAndData() throws IOException { adminClient().performRequest(new Request("POST", INDEX_NAME + "/_refresh")); } - public void testLearnToRankRescore() throws Exception { + public void testLearningToRankRescore() throws Exception { Request request = new Request("GET", "store/_search?size=3&error_trace"); request.setJsonEntity(""" { "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } }"""); assertHitScores(client().performRequest(request), List.of(20.0, 20.0, 17.0)); @@ -211,7 +211,7 @@ public void testLearnToRankRescore() throws Exception { "query": { "term": { "product": "Laptop" } }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "ltr-model", "params": { "keyword": "Laptop" @@ -225,25 +225,25 @@ public void testLearnToRankRescore() throws Exception { "query": {"term": { "product": "Laptop" } }, "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model"} + "learning_to_rank": { "model_id": "ltr-model"} } }"""); assertHitScores(client().performRequest(request), List.of(9.0, 9.0, 6.0)); } - public void testLearnToRankRescoreSmallWindow() throws Exception { + public void testLearningToRankRescoreSmallWindow() throws Exception { Request request = new Request("GET", "store/_search?size=5"); request.setJsonEntity(""" { "rescore": { "window_size": 2, - "learn_to_rank": { "model_id": "ltr-model" } + 
"learning_to_rank": { "model_id": "ltr-model" } } }"""); assertHitScores(client().performRequest(request), List.of(20.0, 20.0, 1.0, 1.0, 1.0)); } - public void testLearnToRankRescorerWithChainedRescorers() throws IOException { + public void testLearningToRankRescorerWithChainedRescorers() throws IOException { Request request = new Request("GET", "store/_search?size=5"); request.setJsonEntity(""" { @@ -254,7 +254,7 @@ public void testLearnToRankRescorerWithChainedRescorers() throws IOException { }, { "window_size": 3, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } }, { "window_size": 2, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index d0f7302105768..749a31de51b07 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -324,9 +324,9 @@ import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; -import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerBuilder; -import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerFeature; -import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankService; +import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankRescorerBuilder; +import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankRescorerFeature; +import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankService; import org.elasticsearch.xpack.ml.inference.modelsize.MlModelSizeNamedXContentProvider; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.inference.pytorch.process.BlackHolePyTorchProcess; @@ -762,7 +762,7 @@ 
public void loadExtensions(ExtensionLoader loader) { private final SetOnce mlLifeCycleService = new SetOnce<>(); private final SetOnce inferenceModelBreaker = new SetOnce<>(); private final SetOnce modelLoadingService = new SetOnce<>(); - private final SetOnce learnToRankService = new SetOnce<>(); + private final SetOnce learningToRankService = new SetOnce<>(); private final SetOnce mlAutoscalingDeciderService = new SetOnce<>(); private final SetOnce deploymentManager = new SetOnce<>(); private final SetOnce trainedModelAllocationClusterServiceSetOnce = new SetOnce<>(); @@ -886,12 +886,12 @@ private static void reportClashingNodeAttribute(String attrName) { @Override public List> getRescorers() { - if (enabled && LearnToRankRescorerFeature.isEnabled()) { + if (enabled && LearningToRankRescorerFeature.isEnabled()) { return List.of( new RescorerSpec<>( - LearnToRankRescorerBuilder.NAME, - in -> new LearnToRankRescorerBuilder(in, learnToRankService.get()), - parser -> LearnToRankRescorerBuilder.fromXContent(parser, learnToRankService.get()) + LearningToRankRescorerBuilder.NAME, + in -> new LearningToRankRescorerBuilder(in, learningToRankService.get()), + parser -> LearningToRankRescorerBuilder.fromXContent(parser, learningToRankService.get()) ) ); } @@ -1120,8 +1120,8 @@ public Collection createComponents(PluginServices services) { ); this.modelLoadingService.set(modelLoadingService); - this.learnToRankService.set( - new LearnToRankService(modelLoadingService, trainedModelProvider, services.scriptService(), services.xContentRegistry()) + this.learningToRankService.set( + new LearningToRankService(modelLoadingService, trainedModelProvider, services.scriptService(), services.xContentRegistry()) ); this.deploymentManager.set( @@ -1797,7 +1797,7 @@ public List getNamedXContent() { ); namedXContent.addAll(new CorrelationNamedContentProvider().getNamedXContentParsers()); // LTR Combine with Inference named content provider when feature flag is removed - if 
(LearnToRankRescorerFeature.isEnabled()) { + if (LearningToRankRescorerFeature.isEnabled()) { namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); } return namedXContent; @@ -1885,7 +1885,7 @@ public List getNamedWriteables() { namedWriteables.addAll(new CorrelationNamedContentProvider().getNamedWriteables()); namedWriteables.addAll(new ChangePointNamedContentProvider().getNamedWriteables()); // LTR Combine with Inference named content provider when feature flag is removed - if (LearnToRankRescorerFeature.isEnabled()) { + if (LearningToRankRescorerFeature.isEnabled()) { namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); } return namedWriteables; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index a82beaf936573..e9b7a1a3e137b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -298,7 +298,7 @@ public void getModelForAggregation(String modelId, ActionListener mo * @param modelId the model to get * @param modelActionListener the listener to alert when the model has been retrieved */ - public void getModelForLearnToRank(String modelId, ActionListener modelActionListener) { + public void getModelForLearningToRank(String modelId, ActionListener modelActionListener) { getModel(modelId, Consumer.SEARCH_RESCORER, null, modelActionListener); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java similarity index 94% rename from 
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorer.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java index dd1df7d8090d6..068462bcdfca2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java @@ -32,17 +32,17 @@ import static java.util.stream.Collectors.toUnmodifiableSet; -public class LearnToRankRescorer implements Rescorer { +public class LearningToRankRescorer implements Rescorer { - public static final LearnToRankRescorer INSTANCE = new LearnToRankRescorer(); - private static final Logger logger = LogManager.getLogger(LearnToRankRescorer.class); + public static final LearningToRankRescorer INSTANCE = new LearningToRankRescorer(); + private static final Logger logger = LogManager.getLogger(LearningToRankRescorer.class); private static final Comparator SCORE_DOC_COMPARATOR = (o1, o2) -> { int cmp = Float.compare(o2.score, o1.score); return cmp == 0 ? 
Integer.compare(o1.doc, o2.doc) : cmp; }; - private LearnToRankRescorer() { + private LearningToRankRescorer() { } @@ -51,7 +51,7 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r if (topDocs.scoreDocs.length == 0) { return topDocs; } - LearnToRankRescorerContext ltrRescoreContext = (LearnToRankRescorerContext) rescoreContext; + LearningToRankRescorerContext ltrRescoreContext = (LearningToRankRescorerContext) rescoreContext; if (ltrRescoreContext.regressionModelDefinition == null) { throw new IllegalStateException("local model reference is null, missing rewriteAndFetch before rescore phase?"); } @@ -104,7 +104,7 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r for (int i = 0; i < hitsToRescore.length; i++) { Map features = docFeatures.get(i); try { - InferenceResults results = definition.inferLtr(features, ltrRescoreContext.learnToRankConfig); + InferenceResults results = definition.inferLtr(features, ltrRescoreContext.learningToRankConfig); if (results instanceof WarningInferenceResults warningInferenceResults) { logger.warn("Failure rescoring doc, warning returned [" + warningInferenceResults.getWarning() + "]"); } else if (results.predictedValue() instanceof Number prediction) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java similarity index 57% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilder.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java index 49a082c9da6df..038f3fb08adbf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilder.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java @@ -22,7 +22,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import java.io.IOException; @@ -30,9 +30,9 @@ import java.util.Objects; import java.util.function.Function; -public class LearnToRankRescorerBuilder extends RescorerBuilder { +public class LearningToRankRescorerBuilder extends RescorerBuilder { - public static final String NAME = "learn_to_rank"; + public static final String NAME = "learning_to_rank"; private static final ParseField MODEL_FIELD = new ParseField("model_id"); private static final ParseField PARAMS_FIELD = new ParseField("params"); private static final ObjectParser PARSER = new ObjectParser<>(NAME, false, Builder::new); @@ -42,56 +42,56 @@ public class LearnToRankRescorerBuilder extends RescorerBuilder p.map(), PARAMS_FIELD); } - public static LearnToRankRescorerBuilder fromXContent(XContentParser parser, LearnToRankService learnToRankService) { - return PARSER.apply(parser, null).build(learnToRankService); + public static LearningToRankRescorerBuilder fromXContent(XContentParser parser, LearningToRankService learningToRankService) { + return PARSER.apply(parser, null).build(learningToRankService); } private final String modelId; private final Map params; - private final LearnToRankService learnToRankService; + private final LearningToRankService learningToRankService; private final LocalModel localModel; - private final LearnToRankConfig learnToRankConfig; + private final LearningToRankConfig learningToRankConfig; private boolean rescoreOccurred = 
false; - LearnToRankRescorerBuilder(String modelId, Map params, LearnToRankService learnToRankService) { - this(modelId, null, params, learnToRankService); + LearningToRankRescorerBuilder(String modelId, Map params, LearningToRankService learningToRankService) { + this(modelId, null, params, learningToRankService); } - LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder( String modelId, - LearnToRankConfig learnToRankConfig, + LearningToRankConfig learningToRankConfig, Map params, - LearnToRankService learnToRankService + LearningToRankService learningToRankService ) { this.modelId = modelId; this.params = params; - this.learnToRankConfig = learnToRankConfig; - this.learnToRankService = learnToRankService; + this.learningToRankConfig = learningToRankConfig; + this.learningToRankService = learningToRankService; // Local inference model is not loaded yet. Will be done in a later rewrite. this.localModel = null; } - LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder( LocalModel localModel, - LearnToRankConfig learnToRankConfig, + LearningToRankConfig learningToRankConfig, Map params, - LearnToRankService learnToRankService + LearningToRankService learningToRankService ) { this.modelId = localModel.getModelId(); this.params = params; - this.learnToRankConfig = learnToRankConfig; + this.learningToRankConfig = learningToRankConfig; this.localModel = localModel; - this.learnToRankService = learnToRankService; + this.learningToRankService = learningToRankService; } - public LearnToRankRescorerBuilder(StreamInput input, LearnToRankService learnToRankService) throws IOException { + public LearningToRankRescorerBuilder(StreamInput input, LearningToRankService learningToRankService) throws IOException { super(input); this.modelId = input.readString(); this.params = input.readMap(); - this.learnToRankConfig = (LearnToRankConfig) input.readOptionalNamedWriteable(InferenceConfig.class); - this.learnToRankService = learnToRankService; + this.learningToRankConfig = 
(LearningToRankConfig) input.readOptionalNamedWriteable(InferenceConfig.class); + this.learningToRankService = learningToRankService; this.localModel = null; } @@ -104,12 +104,12 @@ public Map params() { return params; } - public LearnToRankConfig learnToRankConfig() { - return learnToRankConfig; + public LearningToRankConfig learningToRankConfig() { + return learningToRankConfig; } - public LearnToRankService learnToRankService() { - return learnToRankService; + public LearningToRankService learningToRankService() { + return learningToRankService; } public LocalModel localModel() { @@ -117,7 +117,7 @@ public LocalModel localModel() { } @Override - public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { + public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { if (ctx.convertToDataRewriteContext() != null) { return doDataNodeRewrite(ctx); } @@ -133,41 +133,46 @@ public RescorerBuilder rewrite(QueryRewriteContext c * This can and be done on the coordinator as it not only validates if the stored model is of the appropriate type, it allows * any stored logic to rewrite on the coordinator level if possible. 
* @param ctx QueryRewriteContext - * @return rewritten LearnToRankRescorerBuilder or self if no changes + * @return rewritten LearningToRankRescorerBuilder or self if no changes * @throws IOException when rewrite fails */ - private RescorerBuilder doCoordinatorNodeRewrite(QueryRewriteContext ctx) throws IOException { + private RescorerBuilder doCoordinatorNodeRewrite(QueryRewriteContext ctx) throws IOException { // We have requested for the stored config and fetch is completed, get the config and rewrite further if required - if (learnToRankConfig != null) { - LearnToRankConfig rewrittenConfig = Rewriteable.rewrite(learnToRankConfig, ctx); - if (rewrittenConfig == learnToRankConfig) { + if (learningToRankConfig != null) { + LearningToRankConfig rewrittenConfig = Rewriteable.rewrite(learningToRankConfig, ctx); + if (rewrittenConfig == learningToRankConfig) { return this; } - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder(modelId, rewrittenConfig, params, learnToRankService); + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( + modelId, + rewrittenConfig, + params, + learningToRankService + ); if (windowSize != null) { builder.windowSize(windowSize); } return builder; } - if (learnToRankService == null) { - throw new IllegalStateException("Learn to rank service must be available"); + if (learningToRankService == null) { + throw new IllegalStateException("Learning to rank service must be available"); } - SetOnce configSetOnce = new SetOnce<>(); + SetOnce configSetOnce = new SetOnce<>(); GetTrainedModelsAction.Request request = new GetTrainedModelsAction.Request(modelId); request.setAllowNoResources(false); ctx.registerAsyncAction( - (c, l) -> learnToRankService.loadLearnToRankConfig(modelId, params, ActionListener.wrap(learnToRankConfig -> { - configSetOnce.set(learnToRankConfig); + (c, l) -> learningToRankService.loadLearningToRankConfig(modelId, params, ActionListener.wrap(learningToRankConfig -> { + 
configSetOnce.set(learningToRankConfig); l.onResponse(null); }, l::onFailure)) ); - LearnToRankRescorerBuilder builder = new RewritingLearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new RewritingLearningToRankRescorerBuilder( (rewritingBuilder) -> configSetOnce.get() == null ? rewritingBuilder - : new LearnToRankRescorerBuilder(modelId, configSetOnce.get(), params, learnToRankService) + : new LearningToRankRescorerBuilder(modelId, configSetOnce.get(), params, learningToRankService) ); if (windowSize() != null) { @@ -181,28 +186,28 @@ private RescorerBuilder doCoordinatorNodeRewrite(Que * @param ctx Rewrite context * @return A rewritten rescorer with a model definition or a model definition supplier populated */ - private RescorerBuilder doDataNodeRewrite(QueryRewriteContext ctx) throws IOException { - assert learnToRankConfig != null; + private RescorerBuilder doDataNodeRewrite(QueryRewriteContext ctx) throws IOException { + assert learningToRankConfig != null; // The model is already loaded, no need to rewrite further. 
if (localModel != null) { return this; } - if (learnToRankService == null) { - throw new IllegalStateException("Learn to rank service must be available"); + if (learningToRankService == null) { + throw new IllegalStateException("Learning to rank service must be available"); } - LearnToRankConfig rewrittenConfig = Rewriteable.rewrite(learnToRankConfig, ctx); + LearningToRankConfig rewrittenConfig = Rewriteable.rewrite(learningToRankConfig, ctx); SetOnce localModelSetOnce = new SetOnce<>(); - ctx.registerAsyncAction((c, l) -> learnToRankService.loadLocalModel(modelId, ActionListener.wrap(lm -> { + ctx.registerAsyncAction((c, l) -> learningToRankService.loadLocalModel(modelId, ActionListener.wrap(lm -> { localModelSetOnce.set(lm); l.onResponse(null); }, l::onFailure))); - LearnToRankRescorerBuilder builder = new RewritingLearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new RewritingLearningToRankRescorerBuilder( (rewritingBuilder) -> localModelSetOnce.get() != null - ? new LearnToRankRescorerBuilder(localModelSetOnce.get(), rewrittenConfig, params, learnToRankService) + ? 
new LearningToRankRescorerBuilder(localModelSetOnce.get(), rewrittenConfig, params, learningToRankService) : rewritingBuilder ); @@ -218,15 +223,20 @@ private RescorerBuilder doDataNodeRewrite(QueryRewri * @return A rewritten rescorer with a model definition or a model definition supplier populated * @throws IOException If fetching, parsing, or overall rewrite failures occur */ - private RescorerBuilder doSearchRewrite(QueryRewriteContext ctx) throws IOException { - if (learnToRankConfig == null) { + private RescorerBuilder doSearchRewrite(QueryRewriteContext ctx) throws IOException { + if (learningToRankConfig == null) { return this; } - LearnToRankConfig rewrittenConfig = Rewriteable.rewrite(learnToRankConfig, ctx); - if (rewrittenConfig == learnToRankConfig) { + LearningToRankConfig rewrittenConfig = Rewriteable.rewrite(learningToRankConfig, ctx); + if (rewrittenConfig == learningToRankConfig) { return this; } - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder(localModel, rewrittenConfig, params, learnToRankService); + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( + localModel, + rewrittenConfig, + params, + learningToRankService + ); if (windowSize != null) { builder.windowSize(windowSize); } @@ -234,9 +244,9 @@ private RescorerBuilder doSearchRewrite(QueryRewrite } @Override - protected LearnToRankRescorerContext innerBuildContext(int windowSize, SearchExecutionContext context) { + protected LearningToRankRescorerContext innerBuildContext(int windowSize, SearchExecutionContext context) { rescoreOccurred = true; - return new LearnToRankRescorerContext(windowSize, LearnToRankRescorer.INSTANCE, learnToRankConfig, localModel, context); + return new LearningToRankRescorerContext(windowSize, LearningToRankRescorer.INSTANCE, learningToRankConfig, localModel, context); } @Override @@ -255,7 +265,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { assert localModel == null || rescoreOccurred : 
"Unnecessarily populated local model object"; out.writeString(modelId); out.writeGenericMap(params); - out.writeOptionalNamedWriteable(learnToRankConfig); + out.writeOptionalNamedWriteable(learningToRankConfig); } @Override @@ -273,19 +283,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - LearnToRankRescorerBuilder that = (LearnToRankRescorerBuilder) o; + LearningToRankRescorerBuilder that = (LearningToRankRescorerBuilder) o; return Objects.equals(modelId, that.modelId) && Objects.equals(params, that.params) - && Objects.equals(learnToRankConfig, that.learnToRankConfig) + && Objects.equals(learningToRankConfig, that.learningToRankConfig) && Objects.equals(localModel, that.localModel) - && Objects.equals(learnToRankService, that.learnToRankService) + && Objects.equals(learningToRankService, that.learningToRankService) && rescoreOccurred == that.rescoreOccurred; } @Override public int hashCode() { - return Objects.hash(super.hashCode(), modelId, params, learnToRankConfig, localModel, learnToRankService, rescoreOccurred); + return Objects.hash(super.hashCode(), modelId, params, learningToRankConfig, localModel, learningToRankService, rescoreOccurred); } static class Builder { @@ -300,23 +310,25 @@ public void setParams(Map params) { this.params = params; } - LearnToRankRescorerBuilder build(LearnToRankService learnToRankService) { - return new LearnToRankRescorerBuilder(modelId, params, learnToRankService); + LearningToRankRescorerBuilder build(LearningToRankService learningToRankService) { + return new LearningToRankRescorerBuilder(modelId, params, learningToRankService); } } - private static class RewritingLearnToRankRescorerBuilder extends LearnToRankRescorerBuilder { + private static class RewritingLearningToRankRescorerBuilder extends LearningToRankRescorerBuilder { - private final Function rewriteFunction; + private final Function 
rewriteFunction; - RewritingLearnToRankRescorerBuilder(Function rewriteFunction) { + RewritingLearningToRankRescorerBuilder( + Function rewriteFunction + ) { super(null, null, null); this.rewriteFunction = rewriteFunction; } @Override - public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { - LearnToRankRescorerBuilder builder = this.rewriteFunction.apply(this); + public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { + LearningToRankRescorerBuilder builder = this.rewriteFunction.apply(this); if (windowSize() != null) { builder.windowSize(windowSize()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java similarity index 81% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerContext.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java index 844f96208cb35..b1df3a2da7c42 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java @@ -15,8 +15,8 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.rescore.Rescorer; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; 
import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; @@ -24,34 +24,34 @@ import java.util.ArrayList; import java.util.List; -public class LearnToRankRescorerContext extends RescoreContext { +public class LearningToRankRescorerContext extends RescoreContext { final SearchExecutionContext executionContext; final LocalModel regressionModelDefinition; - final LearnToRankConfig learnToRankConfig; + final LearningToRankConfig learningToRankConfig; /** * @param windowSize how many documents to rescore * @param rescorer The rescorer to apply - * @param learnToRankConfig The inference config containing updated and rewritten parameters + * @param learningToRankConfig The inference config containing updated and rewritten parameters * @param regressionModelDefinition The local model inference definition, may be null during certain search phases. * @param executionContext The local shard search context */ - public LearnToRankRescorerContext( + public LearningToRankRescorerContext( int windowSize, Rescorer rescorer, - LearnToRankConfig learnToRankConfig, + LearningToRankConfig learningToRankConfig, LocalModel regressionModelDefinition, SearchExecutionContext executionContext ) { super(windowSize, rescorer); this.executionContext = executionContext; this.regressionModelDefinition = regressionModelDefinition; - this.learnToRankConfig = learnToRankConfig; + this.learningToRankConfig = learningToRankConfig; } List buildFeatureExtractors(IndexSearcher searcher) throws IOException { - assert this.regressionModelDefinition != null && this.learnToRankConfig != null; + assert this.regressionModelDefinition != null && this.learningToRankConfig != null; List featureExtractors = new ArrayList<>(); if (this.regressionModelDefinition.inputFields().isEmpty() == false) { featureExtractors.add( @@ -60,7 +60,7 @@ List buildFeatureExtractors(IndexSearcher searcher) throws IOE } List weights = new ArrayList<>(); List queryFeatureNames = new ArrayList<>(); - for 
(LearnToRankFeatureExtractorBuilder featureExtractorBuilder : learnToRankConfig.getFeatureExtractorBuilders()) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : learningToRankConfig.getFeatureExtractorBuilders()) { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { Query query = executionContext.toQuery(queryExtractorBuilder.query().getParsedQuery()).query(); Weight weight = searcher.rewrite(query).createWeight(searcher, ScoreMode.COMPLETE, 1f); @@ -77,11 +77,11 @@ List buildFeatureExtractors(IndexSearcher searcher) throws IOE @Override public List getParsedQueries() { - if (this.learnToRankConfig == null) { + if (this.learningToRankConfig == null) { return List.of(); } List parsedQueries = new ArrayList<>(); - for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : learnToRankConfig.getFeatureExtractorBuilders()) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : learningToRankConfig.getFeatureExtractorBuilders()) { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { parsedQueries.add(executionContext.toQuery(queryExtractorBuilder.query().getParsedQuery())); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java similarity index 57% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java index 18b2c6fe5ff3f..42598691beec2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java @@ -10,19 +10,19 @@ import 
org.elasticsearch.common.util.FeatureFlag; /** - * Learn to rank feature flag. When the feature is complete, this flag will be removed. + * Learning to rank feature flag. When the feature is complete, this flag will be removed. * * Upon removal, ensure transport serialization is all corrected for future BWC. * - * See {@link LearnToRankRescorerBuilder} + * See {@link LearningToRankRescorerBuilder} */ -public class LearnToRankRescorerFeature { +public class LearningToRankRescorerFeature { - private LearnToRankRescorerFeature() {} + private LearningToRankRescorerFeature() {} - private static final FeatureFlag LEARN_TO_RANK = new FeatureFlag("learn_to_rank"); + private static final FeatureFlag LEARNING_TO_RANK = new FeatureFlag("learning_to_rank"); public static boolean isEnabled() { - return LEARN_TO_RANK.isEnabled(); + return LEARNING_TO_RANK.isEnabled(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java similarity index 86% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java index 2f85000705d8a..177099801e0a5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java @@ -26,8 +26,8 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -51,7 +51,7 @@ import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.INFERENCE_CONFIG_QUERY_BAD_FORMAT; -public class LearnToRankService { +public class LearningToRankService { private static final Map SCRIPT_OPTIONS = Map.ofEntries( entry(MustacheScriptEngine.DETECT_MISSING_PARAMS_OPTION, Boolean.TRUE.toString()) ); @@ -60,7 +60,7 @@ public class LearnToRankService { private final ScriptService scriptService; private final XContentParserConfiguration parserConfiguration; - public LearnToRankService( + public LearningToRankService( ModelLoadingService modelLoadingService, TrainedModelProvider trainedModelProvider, ScriptService scriptService, @@ -69,7 +69,7 @@ public LearnToRankService( this(modelLoadingService, trainedModelProvider, scriptService, XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry)); } - LearnToRankService( + LearningToRankService( ModelLoadingService modelLoadingService, TrainedModelProvider trainedModelProvider, ScriptService scriptService, @@ -82,30 +82,30 @@ public LearnToRankService( } /** - * Asynchronously load a regression model to be used for learn to rank. + * Asynchronously load a regression model to be used for learning to rank. * * @param modelId The model id to be loaded. * @param listener Response listener. 
*/ public void loadLocalModel(String modelId, ActionListener listener) { - modelLoadingService.getModelForLearnToRank(modelId, listener); + modelLoadingService.getModelForLearningToRank(modelId, listener); } /** - * Asynchronously load the learn to rank config by model id. + * Asynchronously load the learning to rank config by model id. * Once the model is loaded, templates are executed using params provided. * * @param modelId Id of the model. * @param params Templates params. * @param listener Response listener. */ - public void loadLearnToRankConfig(String modelId, Map params, ActionListener listener) { + public void loadLearningToRankConfig(String modelId, Map params, ActionListener listener) { trainedModelProvider.getTrainedModel( modelId, GetTrainedModelsAction.Includes.all(), null, ActionListener.wrap(trainedModelConfig -> { - if (trainedModelConfig.getInferenceConfig() instanceof LearnToRankConfig retrievedInferenceConfig) { + if (trainedModelConfig.getInferenceConfig() instanceof LearningToRankConfig retrievedInferenceConfig) { listener.onResponse(applyParams(retrievedInferenceConfig, params)); return; } @@ -114,7 +114,7 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac Messages.getMessage( Messages.INFERENCE_CONFIG_INCORRECT_TYPE, Optional.ofNullable(trainedModelConfig.getInferenceConfig()).map(InferenceConfig::getName).orElse("null"), - LearnToRankConfig.NAME.getPreferredName() + LearningToRankConfig.NAME.getPreferredName() ) ) ); @@ -123,29 +123,29 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac } /** - * Applies templates params to a {@link LearnToRankConfig} object. + * Applies templates params to a {@link LearningToRankConfig} object. * * @param config Original config. * @param params Templates params. - * @return A {@link LearnToRankConfig} object with templates applied. + * @return A {@link LearningToRankConfig} object with templates applied. 
* * @throws IOException */ - private LearnToRankConfig applyParams(LearnToRankConfig config, Map params) throws Exception { + private LearningToRankConfig applyParams(LearningToRankConfig config, Map params) throws Exception { if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG) == false) { return config; } - List featureExtractorBuilders = new ArrayList<>(); + List featureExtractorBuilders = new ArrayList<>(); Map mergedParams = new HashMap<>(Objects.requireNonNullElse(params, Map.of())); mergeDefaults(mergedParams, config.getParamsDefaults()); - for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : config.getFeatureExtractorBuilders()) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : config.getFeatureExtractorBuilders()) { featureExtractorBuilders.add(applyParams(featureExtractorBuilder, mergedParams)); } - return LearnToRankConfig.builder(config).setLearnToRankFeatureExtractorBuilders(featureExtractorBuilders).build(); + return LearningToRankConfig.builder(config).setLearningToRankFeatureExtractorBuilders(featureExtractorBuilders).build(); } /** @@ -157,8 +157,8 @@ private LearnToRankConfig applyParams(LearnToRankConfig config, Map params ) throws Exception { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderRewriteTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java similarity index 66% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderRewriteTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java index 5939d012831aa..3bfe8aa390d8b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderRewriteTests.java 
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java @@ -26,17 +26,17 @@ import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.test.AbstractBuilderTestCase; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import java.io.IOException; import java.util.List; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfigTests.randomLearnToRankConfig; -import static org.elasticsearch.xpack.ml.inference.ltr.LearnToRankServiceTests.BAD_MODEL; -import static org.elasticsearch.xpack.ml.inference.ltr.LearnToRankServiceTests.GOOD_MODEL; -import static org.elasticsearch.xpack.ml.inference.ltr.LearnToRankServiceTests.GOOD_MODEL_CONFIG; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfigTests.randomLearningToRankConfig; +import static org.elasticsearch.xpack.ml.inference.ltr.LearningToRankServiceTests.BAD_MODEL; +import static org.elasticsearch.xpack.ml.inference.ltr.LearningToRankServiceTests.GOOD_MODEL; +import static org.elasticsearch.xpack.ml.inference.ltr.LearningToRankServiceTests.GOOD_MODEL_CONFIG; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -50,19 +50,19 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class LearnToRankRescorerBuilderRewriteTests extends AbstractBuilderTestCase { +public class 
LearningToRankRescorerBuilderRewriteTests extends AbstractBuilderTestCase { public void testMustRewrite() { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( GOOD_MODEL, - randomLearnToRankConfig(), + randomLearningToRankConfig(), null, - learnToRankService + learningToRankService ); SearchExecutionContext context = createSearchExecutionContext(); - LearnToRankRescorerContext rescorerContext = rescorerBuilder.innerBuildContext(randomIntBetween(1, 30), context); + LearningToRankRescorerContext rescorerContext = rescorerBuilder.innerBuildContext(randomIntBetween(1, 30), context); IllegalStateException e = expectThrows( IllegalStateException.class, () -> rescorerContext.rescorer() @@ -76,25 +76,25 @@ public void testMustRewrite() { } public void testRewriteOnCoordinator() throws IOException { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder(GOOD_MODEL, null, learnToRankService); + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder(GOOD_MODEL, null, learningToRankService); rescorerBuilder.windowSize(4); CoordinatorRewriteContext context = createCoordinatorRewriteContext( new DateFieldMapper.DateFieldType("@timestamp"), randomIntBetween(0, 1_100_000), randomIntBetween(1_500_000, Integer.MAX_VALUE) ); - LearnToRankRescorerBuilder rewritten = rewriteAndFetch(rescorerBuilder, context); - assertThat(rewritten.learnToRankConfig(), not(nullValue())); - assertThat(rewritten.learnToRankConfig().getNumTopFeatureImportanceValues(), equalTo(2)); + LearningToRankRescorerBuilder rewritten = rewriteAndFetch(rescorerBuilder, 
context); + assertThat(rewritten.learningToRankConfig(), not(nullValue())); + assertThat(rewritten.learningToRankConfig().getNumTopFeatureImportanceValues(), equalTo(2)); assertThat( "feature_1", is( in( - rewritten.learnToRankConfig() + rewritten.learningToRankConfig() .getFeatureExtractorBuilders() .stream() - .map(LearnToRankFeatureExtractorBuilder::featureName) + .map(LearningToRankFeatureExtractorBuilder::featureName) .toList() ) ) @@ -103,8 +103,8 @@ public void testRewriteOnCoordinator() throws IOException { } public void testRewriteOnCoordinatorWithBadModel() throws IOException { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder(BAD_MODEL, null, learnToRankService); + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder(BAD_MODEL, null, learningToRankService); CoordinatorRewriteContext context = createCoordinatorRewriteContext( new DateFieldMapper.DateFieldType("@timestamp"), randomIntBetween(0, 1_100_000), @@ -115,8 +115,8 @@ public void testRewriteOnCoordinatorWithBadModel() throws IOException { } public void testRewriteOnCoordinatorWithMissingModel() { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder("missing_model", null, learnToRankService); + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder("missing_model", null, learningToRankService); CoordinatorRewriteContext context = createCoordinatorRewriteContext( new DateFieldMapper.DateFieldType("@timestamp"), randomIntBetween(0, 1_100_000), @@ -129,27 +129,27 @@ public void testRewriteOnShard() throws IOException { LocalModel localModel = mock(LocalModel.class); 
when(localModel.getModelId()).thenReturn(GOOD_MODEL); - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( localModel, - (LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), + (LearningToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), null, - learnToRankService + learningToRankService ); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); - LearnToRankRescorerBuilder rewritten = (LearnToRankRescorerBuilder) rescorerBuilder.rewrite(createSearchExecutionContext()); + LearningToRankRescorerBuilder rewritten = (LearningToRankRescorerBuilder) rescorerBuilder.rewrite(createSearchExecutionContext()); assertFalse(searchExecutionContext.hasAsyncActions()); assertSame(localModel, rewritten.localModel()); assertEquals(localModel.getModelId(), rewritten.modelId()); } public void testRewriteAndFetchOnDataNode() throws IOException { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( GOOD_MODEL, - randomLearnToRankConfig(), + randomLearningToRankConfig(), null, - learnToRankService + learningToRankService ); boolean setWindowSize = randomBoolean(); @@ -157,7 +157,7 @@ public void testRewriteAndFetchOnDataNode() throws IOException { rescorerBuilder.windowSize(42); } DataRewriteContext rewriteContext = dataRewriteContext(); - LearnToRankRescorerBuilder rewritten = (LearnToRankRescorerBuilder) rescorerBuilder.rewrite(rewriteContext); + LearningToRankRescorerBuilder rewritten = (LearningToRankRescorerBuilder) 
rescorerBuilder.rewrite(rewriteContext); assertNotSame(rescorerBuilder, rewritten); assertTrue(rewriteContext.hasAsyncActions()); if (setWindowSize) { @@ -166,8 +166,8 @@ public void testRewriteAndFetchOnDataNode() throws IOException { } @SuppressWarnings("unchecked") - private static LearnToRankService learnToRankServiceMock() { - LearnToRankService learnToRankService = mock(LearnToRankService.class); + private static LearningToRankService learningToRankServiceMock() { + LearningToRankService learningToRankService = mock(LearningToRankService.class); doAnswer(invocation -> { String modelId = invocation.getArgument(0); @@ -180,15 +180,15 @@ private static LearnToRankService learnToRankServiceMock() { l.onFailure(new ResourceNotFoundException("missing model")); } return null; - }).when(learnToRankService).loadLearnToRankConfig(anyString(), any(), any()); + }).when(learningToRankService).loadLearningToRankConfig(anyString(), any(), any()); doAnswer(invocation -> { ActionListener l = invocation.getArgument(1, ActionListener.class); l.onResponse(mock(LocalModel.class)); return null; - }).when(learnToRankService).loadLocalModel(anyString(), any()); + }).when(learningToRankService).loadLocalModel(anyString(), any()); - return learnToRankService; + return learningToRankService; } public void testBuildContext() throws Exception { @@ -200,14 +200,14 @@ public void testBuildContext() throws Exception { doAnswer(invocation -> invocation.getArgument(0)).when(searcher).rewrite(any(Query.class)); SearchExecutionContext context = createSearchExecutionContext(searcher); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( localModel, - (LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), + (LearningToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), null, - mock(LearnToRankService.class) + mock(LearningToRankService.class) ); - LearnToRankRescorerContext 
rescoreContext = rescorerBuilder.innerBuildContext(20, context); + LearningToRankRescorerContext rescoreContext = rescorerBuilder.innerBuildContext(20, context); assertNotNull(rescoreContext); assertThat(rescoreContext.getWindowSize(), equalTo(20)); List featureExtractors = rescoreContext.buildFeatureExtractors(context.searcher()); @@ -218,9 +218,12 @@ public void testBuildContext() throws Exception { ); } - private LearnToRankRescorerBuilder rewriteAndFetch(RescorerBuilder builder, QueryRewriteContext context) { - PlainActionFuture> future = new PlainActionFuture<>(); + private LearningToRankRescorerBuilder rewriteAndFetch( + RescorerBuilder builder, + QueryRewriteContext context + ) { + PlainActionFuture> future = new PlainActionFuture<>(); Rewriteable.rewriteAndFetch(builder, context, future); - return (LearnToRankRescorerBuilder) future.actionGet(); + return (LearningToRankRescorerBuilder) future.actionGet(); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderSerializationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java similarity index 71% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderSerializationTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java index 4f6e5a0512f73..79044a465442b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderSerializationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import 
org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; @@ -30,18 +30,18 @@ import java.util.Map; import static org.elasticsearch.search.rank.RankBuilder.WINDOW_SIZE_FIELD; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfigTests.randomLearnToRankConfig; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfigTests.randomLearningToRankConfig; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class LearnToRankRescorerBuilderSerializationTests extends AbstractBWCSerializationTestCase { +public class LearningToRankRescorerBuilderSerializationTests extends AbstractBWCSerializationTestCase { - private static LearnToRankService learnToRankService = mock(LearnToRankService.class); + private static LearningToRankService learningToRankService = mock(LearningToRankService.class); @Override - protected LearnToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { + protected LearningToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { String fieldName = null; - LearnToRankRescorerBuilder rescorer = null; + LearningToRankRescorerBuilder rescorer = null; Integer windowSize = null; XContentParser.Token token = parser.nextToken(); assert token == XContentParser.Token.START_OBJECT; @@ -55,7 +55,7 @@ protected LearnToRankRescorerBuilder doParseInstance(XContentParser parser) thro throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { - rescorer = LearnToRankRescorerBuilder.fromXContent(parser, 
learnToRankService); + rescorer = LearningToRankRescorerBuilder.fromXContent(parser, learningToRankService); } else { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); } @@ -70,19 +70,19 @@ protected LearnToRankRescorerBuilder doParseInstance(XContentParser parser) thro } @Override - protected Writeable.Reader instanceReader() { - return in -> new LearnToRankRescorerBuilder(in, learnToRankService); + protected Writeable.Reader instanceReader() { + return in -> new LearningToRankRescorerBuilder(in, learningToRankService); } @Override - protected LearnToRankRescorerBuilder createTestInstance() { - LearnToRankRescorerBuilder builder = randomBoolean() + protected LearningToRankRescorerBuilder createTestInstance() { + LearningToRankRescorerBuilder builder = randomBoolean() ? createXContextTestInstance(null) - : new LearnToRankRescorerBuilder( + : new LearningToRankRescorerBuilder( randomAlphaOfLength(10), - randomLearnToRankConfig(), + randomLearningToRankConfig(), randomBoolean() ? randomParams() : null, - learnToRankService + learningToRankService ); if (randomBoolean()) { @@ -93,34 +93,34 @@ protected LearnToRankRescorerBuilder createTestInstance() { } @Override - protected LearnToRankRescorerBuilder createXContextTestInstance(XContentType xContentType) { - return new LearnToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? randomParams() : null, learnToRankService); + protected LearningToRankRescorerBuilder createXContextTestInstance(XContentType xContentType) { + return new LearningToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? 
randomParams() : null, learningToRankService); } @Override - protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder instance) throws IOException { + protected LearningToRankRescorerBuilder mutateInstance(LearningToRankRescorerBuilder instance) throws IOException { int i = randomInt(4); return switch (i) { case 0 -> { - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( randomValueOtherThan(instance.modelId(), () -> randomAlphaOfLength(10)), instance.params(), - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize()); } yield builder; } - case 1 -> new LearnToRankRescorerBuilder(instance.modelId(), instance.params(), learnToRankService).windowSize( + case 1 -> new LearningToRankRescorerBuilder(instance.modelId(), instance.params(), learningToRankService).windowSize( randomValueOtherThan(instance.windowSize(), () -> randomIntBetween(1, 10000)) ); case 2 -> { - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( instance.modelId(), randomValueOtherThan(instance.params(), () -> (randomBoolean() ? 
randomParams() : null)), - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize() + 1); @@ -128,12 +128,15 @@ protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder i yield builder; } case 3 -> { - LearnToRankConfig learnToRankConfig = randomValueOtherThan(instance.learnToRankConfig(), () -> randomLearnToRankConfig()); - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankConfig learningToRankConfig = randomValueOtherThan( + instance.learningToRankConfig(), + () -> randomLearningToRankConfig() + ); + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( instance.modelId(), - learnToRankConfig, + learningToRankConfig, null, - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize()); @@ -141,11 +144,11 @@ protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder i yield builder; } case 4 -> { - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( mock(LocalModel.class), - instance.learnToRankConfig(), + instance.learningToRankConfig(), instance.params(), - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize()); @@ -157,7 +160,7 @@ protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder i } @Override - protected LearnToRankRescorerBuilder mutateInstanceForVersion(LearnToRankRescorerBuilder instance, TransportVersion version) { + protected LearningToRankRescorerBuilder mutateInstanceForVersion(LearningToRankRescorerBuilder instance, TransportVersion version) { return instance; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java similarity index 77% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java index cbe91ba874e6d..39d0af9041d03 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; @@ -49,7 +49,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; -public class LearnToRankServiceTests extends ESTestCase { +public class LearningToRankServiceTests extends ESTestCase { public static final String GOOD_MODEL = "modelId"; public static final String BAD_MODEL = "badModel"; public static final TrainedModelConfig GOOD_MODEL_CONFIG = TrainedModelConfig.builder() @@ -59,7 +59,7 @@ public class LearnToRankServiceTests extends ESTestCase { .setModelSize(2) .setModelType(TrainedModelType.TREE_ENSEMBLE) .setInferenceConfig( - new LearnToRankConfig( + new LearningToRankConfig( 2, List.of( new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), @@ -79,44 +79,44 @@ 
public class LearnToRankServiceTests extends ESTestCase { .build(); @SuppressWarnings("unchecked") - public void testLoadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = getTestLearnToRankService(); - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(GOOD_MODEL, Map.of(), listener); + public void testLoadLearningToRankConfig() throws Exception { + LearningToRankService learningToRankService = getTestLearningToRankService(); + ActionListener listener = mock(ActionListener.class); + learningToRankService.loadLearningToRankConfig(GOOD_MODEL, Map.of(), listener); - verify(listener).onResponse(eq((LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig())); + verify(listener).onResponse(eq((LearningToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig())); } @SuppressWarnings("unchecked") - public void testLoadMissingLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = getTestLearnToRankService(); - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("non-existing-model", Map.of(), listener); + public void testLoadMissingLearningToRankConfig() throws Exception { + LearningToRankService learningToRankService = getTestLearningToRankService(); + ActionListener listener = mock(ActionListener.class); + learningToRankService.loadLearningToRankConfig("non-existing-model", Map.of(), listener); verify(listener).onFailure(isA(ResourceNotFoundException.class)); } @SuppressWarnings("unchecked") - public void testLoadBadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = getTestLearnToRankService(); - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(BAD_MODEL, Map.of(), listener); + public void testLoadBadLearningToRankConfig() throws Exception { + LearningToRankService learningToRankService = getTestLearningToRankService(); + ActionListener listener = 
mock(ActionListener.class); + learningToRankService.loadLearningToRankConfig(BAD_MODEL, Map.of(), listener); verify(listener).onFailure(isA(ElasticsearchStatusException.class)); } @SuppressWarnings("unchecked") - public void testLoadLearnToRankConfigWithTemplate() throws Exception { - LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + public void testLoadLearningToRankConfigWithTemplate() throws Exception { + LearningToRankConfig learningToRankConfig = new LearningToRankConfig( 0, List.of(new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "{{foo_param}}"))), Map.of() ); - LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); - ActionListener listener = mock(ActionListener.class); + LearningToRankService learningToRankService = getTestLearningToRankService(learningToRankConfig); + ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("model-id", Map.ofEntries(Map.entry("foo_param", "foo")), listener); + learningToRankService.loadLearningToRankConfig("model-id", Map.ofEntries(Map.entry("foo_param", "foo")), listener); verify(listener).onResponse(argThat(retrievedConfig -> { assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(1)); QueryExtractorBuilder queryExtractorBuilder = retrievedConfig.getQueryFeatureExtractorBuilders().get(0); @@ -127,8 +127,8 @@ public void testLoadLearnToRankConfigWithTemplate() throws Exception { } @SuppressWarnings("unchecked") - public void testLoadLearnToRankConfigWithMissingTemplateParams() throws Exception { - LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + public void testLoadLearningToRankConfigWithMissingTemplateParams() throws Exception { + LearningToRankConfig learningToRankConfig = new LearningToRankConfig( 0, List.of( new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), @@ -139,10 +139,10 @@ public void 
testLoadLearnToRankConfigWithMissingTemplateParams() throws Exceptio Map.of("baz_param", "default_value") ); - LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); - ActionListener listener = mock(ActionListener.class); + LearningToRankService learningToRankService = getTestLearningToRankService(learningToRankConfig); + ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("model-id", randomBoolean() ? null : Map.of(), listener); + learningToRankService.loadLearningToRankConfig("model-id", randomBoolean() ? null : Map.of(), listener); verify(listener).onResponse(argThat(retrievedConfig -> { // Check all features are present. assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(4)); @@ -208,12 +208,12 @@ private TrainedModelProvider mockTrainedModelProvider() { return trainedModelProvider; } - private LearnToRankService getTestLearnToRankService() { - return getTestLearnToRankService(mockTrainedModelProvider()); + private LearningToRankService getTestLearningToRankService() { + return getTestLearningToRankService(mockTrainedModelProvider()); } @SuppressWarnings("unchecked") - private LearnToRankService getTestLearnToRankService(LearnToRankConfig learnToRankConfig) { + private LearningToRankService getTestLearningToRankService(LearningToRankConfig learningToRankConfig) { TrainedModelProvider trainedModelProvider = mock(TrainedModelProvider.class); doAnswer(invocation -> { @@ -227,18 +227,18 @@ private LearnToRankService getTestLearnToRankService(LearnToRankConfig learnToRa .setEstimatedOperations(1) .setModelSize(2) .setModelType(TrainedModelType.TREE_ENSEMBLE) - .setInferenceConfig(learnToRankConfig) + .setInferenceConfig(learningToRankConfig) .build() ); return null; }).when(trainedModelProvider).getTrainedModel(any(), any(), any(), any()); - return getTestLearnToRankService(trainedModelProvider); + return getTestLearningToRankService(trainedModelProvider); } - private 
LearnToRankService getTestLearnToRankService(TrainedModelProvider trainedModelProvider) { - return new LearnToRankService(mockModelLoadingService(), trainedModelProvider, getTestScriptService(), xContentRegistry()); + private LearningToRankService getTestLearningToRankService(TrainedModelProvider trainedModelProvider) { + return new LearningToRankService(mockModelLoadingService(), trainedModelProvider, getTestScriptService(), xContentRegistry()); } private ScriptService getTestScriptService() { diff --git a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 3fd8e952d626e..0efe2797c7f76 100644 --- a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -43,7 +43,7 @@ public class XPackRestIT extends AbstractXPackRestTest { .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") .user("x_pack_rest_user", "x-pack-test-password") .feature(FeatureFlag.TIME_SERIES_MODE) - .feature(FeatureFlag.LEARN_TO_RANK) + .feature(FeatureFlag.LEARNING_TO_RANK) .configFile("testnode.pem", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learn_to_rank_rescorer.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml similarity index 94% rename from x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learn_to_rank_rescorer.yml rename to x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml index 
a0ae4b7c44316..e307e72d2ca4f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learn_to_rank_rescorer.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml @@ -11,7 +11,7 @@ setup: "description": "super complex model for tests", "input": {"field_names": ["cost", "product"]}, "inference_config": { - "learn_to_rank": { + "learning_to_rank": { } }, "definition": { @@ -146,7 +146,7 @@ setup: { "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - match: { hits.hits.0._score: 17.0 } @@ -162,7 +162,7 @@ setup: "query": {"term": {"product": "Laptop"}}, "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - match: { hits.hits.0._score: 6.0 } @@ -182,7 +182,7 @@ setup: { "rescore": { "window_size": 2, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - match: { hits.hits.0._score: 17.0 } @@ -209,7 +209,7 @@ setup: }, { "window_size": 3, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } }, { "window_size": 2, @@ -232,7 +232,7 @@ setup: { "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-missing" } + "learning_to_rank": { "model_id": "ltr-missing" } } } --- @@ -245,7 +245,7 @@ setup: "query": {"term": {"product": "Speaker"}}, "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - length: { hits.hits: 0 } From ccf92e42f1ea6b941c35a66daf3ed260e37808b4 Mon Sep 17 00:00:00 2001 From: Matteo Piergiovanni <134913285+piergm@users.noreply.github.com> Date: Tue, 5 Dec 2023 08:43:34 +0100 Subject: [PATCH 141/181] Node stats as metrics (#102248) In ES there are node stats that can be retrieved via API call (`GET /_nodes/stats`) but not scraped by Metricbeat. 
This PR register as metrics some of those stats. The API has the capability to aggregate stats of all the nodes connected to the cluster. We decided instead each node will report its own stats in order not to hit the wire and cause unwanted latencies. All the metrics are registered as either `LongAsyncCounter` or `LongGauge` both of which have a callback reporting the total value for a metric and not the delta. We have in place a lazy cache that expires after 1 minute for `NodeStats` in order not to recalculate it for every metric callback. List of metrics that this PR will introduce: - es.node.stats.indices.get.total - es.node.stats.indices.get.time - es.node.stats.indices.search.fetch.total - es.node.stats.indices.search.fetch.time - es.node.stats.indices.merge.total - es.node.stats.indices.merge.time - es.node.stats.indices.translog.operations - es.node.stats.indices.translog.size - es.node.stats.indices.translog.uncommitted_operations - es.node.stats.indices.translog.uncommitted_size - es.node.stats.indices.translog.earliest_last_modified_age - es.node.stats.transport.rx_size - es.node.stats.transport.tx_size - es.node.stats.jvm.mem.pools.young.used - es.node.stats.jvm.mem.pools.survivor.used - es.node.stats.jvm.mem.pools.old.used - es.node.stats.fs.io_stats.io_time.total --- docs/changelog/102248.yaml | 5 + .../monitor/metrics/NodeMetrics.java | 311 ++++++++++++++++++ .../java/org/elasticsearch/node/Node.java | 5 + .../elasticsearch/node/NodeConstruction.java | 4 + 4 files changed, 325 insertions(+) create mode 100644 docs/changelog/102248.yaml create mode 100644 server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java diff --git a/docs/changelog/102248.yaml b/docs/changelog/102248.yaml new file mode 100644 index 0000000000000..854e8afde4086 --- /dev/null +++ b/docs/changelog/102248.yaml @@ -0,0 +1,5 @@ +pr: 102248 +summary: Node stats as metrics +area: Search +type: enhancement +issues: [] diff --git 
a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java new file mode 100644 index 0000000000000..e36f1a085fbde --- /dev/null +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -0,0 +1,311 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.monitor.metrics; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.util.SingleObjectCache; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.monitor.jvm.GcNames; +import org.elasticsearch.monitor.jvm.JvmStats; +import org.elasticsearch.node.NodeService; +import org.elasticsearch.telemetry.metric.LongWithAttributes; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * NodeMetrics monitors various statistics of an Elasticsearch node and exposes them as metrics through + * the provided MeterRegistry. It includes counters for indices operations, memory usage, transport statistics, + * and more. The metrics are periodically updated based on a schedule. 
+ */ +public class NodeMetrics extends AbstractLifecycleComponent { + private final Logger logger = LogManager.getLogger(NodeMetrics.class); + private final MeterRegistry registry; + private final NodeService nodeService; + private final List metrics; + private NodeStatsCache stats; + + /** + * Constructs a new NodeMetrics instance. + * + * @param meterRegistry The MeterRegistry used to register metrics. + * @param nodeService The NodeService for interacting with the Elasticsearch node and extracting statistics. + */ + public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService) { + this.registry = meterRegistry; + this.nodeService = nodeService; + this.metrics = new ArrayList<>(17); + } + + /** + * Registers async metrics in the provided MeterRegistry. We are using the lazy NodeStatCache to retrieve + * the NodeStats once per pool period instead of for every callback if we were not to use it. + * + * @param registry The MeterRegistry used to register and collect metrics. + */ + private void registerAsyncMetrics(MeterRegistry registry) { + // Agent should poll stats every 4 minutes and being this cache is lazy we need a + // number high enough so that the cache does not update during the same poll + // period and that expires before a new poll period, therefore we choose 1 minute. 
+ this.stats = new NodeStatsCache(TimeValue.timeValueMinutes(1)); + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.get.total", + "Total number of get operations", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getCount()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.get.time", + "Time in milliseconds spent performing get operations.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getTimeInMillis()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.search.fetch.total", + "Total number of fetch operations.", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchCount()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.search.fetch.time", + "Time in milliseconds spent performing fetch operations.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchTimeInMillis()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.merge.total", + "Total number of merge operations.", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotal()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.merge.time", + "Time in milliseconds spent performing merge operations.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotalTimeInMillis()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.indices.translog.operations", + "Number of transaction log operations.", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().estimatedNumberOfOperations()) + ) + ); + + metrics.add( + registry.registerLongGauge( + 
"es.node.stats.indices.translog.size", + "Size, in bytes, of the transaction log.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getTranslogSizeInBytes()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.indices.translog.uncommitted_operations", + "Number of uncommitted transaction log operations.", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedOperations()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.indices.translog.uncommitted_size", + "Size, in bytes, of uncommitted transaction log operations.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedSizeInBytes()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.translog.earliest_last_modified_age", + "Earliest last modified age for the transaction log.", + "time", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getEarliestLastModifiedAge()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.transport.rx_size", + "Size, in bytes, of RX packets received by the node during internal cluster communication.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getRxSize().getBytes()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.transport.tx_size", + "Size, in bytes, of TX packets sent by the node during internal cluster communication.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getTxSize().getBytes()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.jvm.mem.pools.young.used", + "Memory, in bytes, used by the young generation heap.", + "bytes", + () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.YOUNG)) + ) + ); + + metrics.add( + 
registry.registerLongGauge( + "es.node.stats.jvm.mem.pools.survivor.used", + "Memory, in bytes, used by the survivor space.", + "bytes", + () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.SURVIVOR)) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.jvm.mem.pools.old.used", + "Memory, in bytes, used by the old generation heap.", + "bytes", + () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.OLD)) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.fs.io_stats.io_time.total", + "The total time in millis spent performing I/O operations across all devices used by Elasticsearch.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getFs().getIoStats().getTotalIOTimeMillis()) + ) + ); + } + + /** + * Retrieves the bytes used by a specific garbage collection generation from the provided JvmStats.Mem. + * + * @param mem The JvmStats.Mem containing memory pool information. + * @param name The name of the garbage collection generation (e.g., "young", "survivor", "old"). + * @return The number of bytes used by the specified garbage collection generation. + */ + private long bytesUsedByGCGen(JvmStats.Mem mem, String name) { + long bytesUsed = 0; + for (JvmStats.MemoryPool pool : mem) { + if (pool.getName().equals(name)) { + bytesUsed = pool.getUsed().getBytes(); + } + } + return bytesUsed; + } + + /** + * Retrieves the current NodeStats for the Elasticsearch node. + * + * @return The current NodeStats. 
+ */ + private NodeStats getNodeStats() { + CommonStatsFlags flags = new CommonStatsFlags( + CommonStatsFlags.Flag.Get, + CommonStatsFlags.Flag.Search, + CommonStatsFlags.Flag.Merge, + CommonStatsFlags.Flag.Translog + ); + return nodeService.stats( + flags, + true, + false, + false, + true, + false, + true, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false + ); + } + + @Override + protected void doStart() { + registerAsyncMetrics(registry); + } + + @Override + protected void doStop() { + stats.stopRefreshing(); + } + + @Override + protected void doClose() throws IOException { + metrics.forEach(metric -> { + try { + metric.close(); + } catch (Exception e) { + logger.warn("metrics close() method should not throw Exception", e); + } + }); + } + + /** + * A very simple NodeStats cache that allows non-blocking refresh calls + * lazily triggered by expiry time. When getOrRefresh() is called either + * the cached NodeStats is returned if refreshInterval didn't expire or + * refresh() is called, cache is updated and the new instance returned. + */ + private class NodeStatsCache extends SingleObjectCache { + private boolean refresh; + + NodeStatsCache(TimeValue interval) { + super(interval, getNodeStats()); + this.refresh = true; + } + + @Override + protected NodeStats refresh() { + return refresh ? 
getNodeStats() : getNoRefresh(); + } + + public void stopRefreshing() { + this.refresh = false; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 1c1b9745befe8..67c604248a7f4 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -60,6 +60,7 @@ import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.monitor.fs.FsHealthService; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.monitor.metrics.NodeMetrics; import org.elasticsearch.node.internal.TerminationHandler; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.ClusterPlugin; @@ -419,6 +420,8 @@ public void onTimeout(TimeValue timeout) { } } + injector.getInstance(NodeMetrics.class).start(); + logger.info("started {}", transportService.getLocalNode()); pluginsService.filterPlugins(ClusterPlugin.class).forEach(ClusterPlugin::onNodeStarted); @@ -462,6 +465,7 @@ private void stop() { stopIfStarted(GatewayService.class); stopIfStarted(SearchService.class); stopIfStarted(TransportService.class); + stopIfStarted(NodeMetrics.class); pluginLifecycleComponents.forEach(Node::stopIfStarted); // we should stop this last since it waits for resources to get released @@ -530,6 +534,7 @@ public synchronized void close() throws IOException { toClose.add(injector.getInstance(SearchService.class)); toClose.add(() -> stopWatch.stop().start("transport")); toClose.add(injector.getInstance(TransportService.class)); + toClose.add(injector.getInstance(NodeMetrics.class)); if (ReadinessService.enabled(environment)) { toClose.add(injector.getInstance(ReadinessService.class)); } diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 19a1310ed86aa..7a0d8c941e50f 100644 --- 
a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -125,6 +125,7 @@ import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.fs.FsHealthService; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.monitor.metrics.NodeMetrics; import org.elasticsearch.node.internal.TerminationHandler; import org.elasticsearch.node.internal.TerminationHandlerProvider; import org.elasticsearch.persistent.PersistentTasksClusterService; @@ -963,6 +964,8 @@ record PluginServiceInstances( repositoryService ); + final NodeMetrics nodeMetrics = new NodeMetrics(telemetryProvider.getMeterRegistry(), nodeService); + final SearchService searchService = serviceProvider.newSearchService( pluginsService, clusterService, @@ -1039,6 +1042,7 @@ record PluginServiceInstances( b.bind(SearchPhaseController.class).toInstance(new SearchPhaseController(searchService::aggReduceContextBuilder)); b.bind(Transport.class).toInstance(transport); b.bind(TransportService.class).toInstance(transportService); + b.bind(NodeMetrics.class).toInstance(nodeMetrics); b.bind(NetworkService.class).toInstance(networkService); b.bind(IndexMetadataVerifier.class).toInstance(indexMetadataVerifier); b.bind(ClusterInfoService.class).toInstance(clusterInfoService); From 43c320f75fd4803fdf9c0902edc2ce609491723e Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 5 Dec 2023 08:53:52 +0100 Subject: [PATCH 142/181] Fix layout for MV_EXPAND (#102916) --- docs/changelog/102916.yaml | 6 ++++++ .../src/main/resources/mv_expand.csv-spec | 10 ++++++++++ .../elasticsearch/xpack/esql/planner/Layout.java | 9 +++++++++ .../xpack/esql/planner/LocalExecutionPlanner.java | 15 ++------------- 4 files changed, 27 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/102916.yaml diff --git a/docs/changelog/102916.yaml b/docs/changelog/102916.yaml new file mode 100644 index 
0000000000000..3943f34d91221 --- /dev/null +++ b/docs/changelog/102916.yaml @@ -0,0 +1,6 @@ +pr: 102916 +summary: Fix layout for MV_EXPAND +area: ES|QL +type: bug +issues: + - 102912 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec index c681a1a7e977c..a3bc9c6c6dcf6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec @@ -306,3 +306,13 @@ a:long | b:long | c:long | gender:keyword | str:keyword | x:key 57 |57 |57 |M |"57,M" |M 0 |10 |10 |null |null |null ; + + +//see https://github.com/elastic/elasticsearch/issues/102912 +statsDissectThatOverwritesAndMvExpand#[skip:-8.11.99] +row a = "a", b = 1 | stats e = min(b) by a | dissect a "%{e}" | mv_expand e; + +a:keyword | e:keyword +a | a +; + diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java index 871d3751b225d..97885a060d639 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java @@ -119,5 +119,14 @@ public Layout build() { } return new DefaultLayout(Collections.unmodifiableMap(layout), numberOfChannels); } + + public void replace(NameId id, NameId id1) { + for (ChannelSet channel : this.channels) { + if (channel != null && channel.nameIds.contains(id)) { + channel.nameIds.remove(id); + channel.nameIds.add(id1); + } + } + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 3d377497e17af..c531fd01c2a40 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -581,20 +581,9 @@ private PhysicalOperation planLimit(LimitExec limit, LocalExecutionPlannerContex private PhysicalOperation planMvExpand(MvExpandExec mvExpandExec, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(mvExpandExec.child(), context); - List childOutput = mvExpandExec.child().output(); int blockSize = 5000;// TODO estimate row size and use context.pageSize() - - Layout.Builder layout = new Layout.Builder(); - List inverse = source.layout.inverse(); - var expandedName = mvExpandExec.expanded().name(); - for (int index = 0; index < inverse.size(); index++) { - if (childOutput.get(index).name().equals(expandedName)) { - layout.append(mvExpandExec.expanded()); - } else { - layout.append(inverse.get(index)); - } - } - + Layout.Builder layout = source.layout.builder(); + layout.replace(mvExpandExec.target().id(), mvExpandExec.expanded().id()); return source.with( new MvExpandOperator.Factory(source.layout.get(mvExpandExec.target().id()).channel(), blockSize), layout.build() From 26905047fb62ba2932ccd85f0e958bc8b9205a5b Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 5 Dec 2023 09:30:50 +0100 Subject: [PATCH 143/181] Rework minio test fixture and its usages (#102707) * Update minio fixture and usages to rely on new test cluster framework and testcontainer * Cache test fixture resources during packer caching Add logic that ensures we resolve docker images resolved from docker registry as part of our packer cache setup. 
--- build-tools-internal/build.gradle | 6 + .../internal/RestrictedBuildApiService.java | 2 - .../packer/CacheCacheableTestFixtures.java | 115 ++++++++++++++++++ .../CacheTestFixtureResourcesPlugin.java | 45 +++++++ build-tools-internal/version.properties | 4 + gradle/build.versions.toml | 1 + gradle/verification-metadata.xml | 5 + test/fixtures/geoip-fixture/build.gradle | 2 - test/fixtures/minio-fixture/Dockerfile | 9 -- test/fixtures/minio-fixture/build.gradle | 8 +- .../fixtures/minio-fixture/docker-compose.yml | 35 ------ .../test/fixtures/CacheableTestFixture.java | 13 ++ .../fixtures/minio/MinioTestContainer.java | 15 ++- .../qa/minio/build.gradle | 41 ++----- .../minio/MinioSearchableSnapshotsIT.java | 45 +++++-- .../qa/hdfs/build.gradle | 2 +- .../qa/minio/build.gradle | 30 +---- .../testkit/MinioSnapshotRepoTestKitIT.java | 42 +++++-- 18 files changed, 290 insertions(+), 130 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java delete mode 100644 test/fixtures/minio-fixture/Dockerfile delete mode 100644 test/fixtures/minio-fixture/docker-compose.yml create mode 100644 test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 66001e66f2486..738b3f62803ab 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -195,6 +195,10 @@ gradlePlugin { id = 'elasticsearch.legacy-yaml-rest-test' implementationClass = 'org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin' } + cacheTestFixtures { + id = 'elasticsearch.cache-test-fixtures' + implementationClass = 'org.elasticsearch.gradle.internal.packer.CacheTestFixtureResourcesPlugin' + } yamlRestTest { id = 
'elasticsearch.internal-yaml-rest-test' implementationClass = 'org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin' @@ -288,6 +292,8 @@ dependencies { api buildLibs.httpcore compileOnly buildLibs.checkstyle + compileOnly buildLibs.reflections + runtimeOnly "org.elasticsearch.gradle:reaper:$version" testImplementation buildLibs.checkstyle testImplementation buildLibs.wiremock diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index 2d5dc65a43fae..8042bdd64dabb 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -139,7 +139,6 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:native-multi-node-tests"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:single-node-tests"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:hdfs"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:minio"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:rest"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:url"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:security:qa:tls-basic"); @@ -150,7 +149,6 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-based-recoveries:qa:fs"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-based-recoveries:qa:license-enforcing"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-repo-test-kit:qa:hdfs"); - map.put(LegacyRestTestBasePlugin.class, 
":x-pack:plugin:snapshot-repo-test-kit:qa:minio"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-repo-test-kit:qa:rest"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:sql:qa:jdbc:multi-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:sql:qa:jdbc:no-sql"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java new file mode 100644 index 0000000000000..a01b1c28a851f --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.packer; + +import org.gradle.api.DefaultTask; +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.FileCollection; +import org.gradle.api.tasks.CompileClasspath; +import org.gradle.api.tasks.TaskAction; +import org.gradle.workers.WorkAction; +import org.gradle.workers.WorkParameters; +import org.gradle.workers.WorkQueue; +import org.gradle.workers.WorkerExecutor; +import org.reflections.Reflections; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.util.ClasspathHelper; +import org.reflections.util.ConfigurationBuilder; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Method; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.LinkedHashSet; +import java.util.Set; + +import javax.inject.Inject; + +public abstract class CacheCacheableTestFixtures extends DefaultTask { + + @CompileClasspath + public abstract ConfigurableFileCollection getClasspath(); + + @Inject + public abstract WorkerExecutor getWorkerExecutor(); + + /** + * Executes the forbidden apis task. 
+ */ + @TaskAction + public void checkForbidden() { + WorkQueue workQueue = getWorkerExecutor().classLoaderIsolation(spec -> spec.getClasspath().from(getClasspath())); + workQueue.submit(CacheTestFixtureWorkAction.class, params -> params.getClasspath().setFrom(getClasspath())); + } + + interface Parameters extends WorkParameters { + ConfigurableFileCollection getClasspath(); + } + + abstract static class CacheTestFixtureWorkAction implements WorkAction { + + @Inject + @SuppressWarnings("checkstyle:RedundantModifier") + public CacheTestFixtureWorkAction() {} + + @Override + public void execute() { + final URLClassLoader urlLoader = createClassLoader(getParameters().getClasspath()); + try { + Reflections reflections = new Reflections( + new ConfigurationBuilder().setUrls(ClasspathHelper.forPackage("org.elasticsearch.test.fixtures")) + .setScanners(new SubTypesScanner()) + ); + + Class ifClass = Class.forName("org.elasticsearch.test.fixtures.CacheableTestFixture"); + Set> classes = (Set>) reflections.getSubTypesOf(ifClass); + + for (Class cacheableTestFixtureClazz : classes) { + Object o = cacheableTestFixtureClazz.getDeclaredConstructor().newInstance(); + Method cacheMethod = cacheableTestFixtureClazz.getMethod("cache"); + cacheMethod.invoke(o); + } + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + // Close the classloader to free resources: + try { + if (urlLoader != null) urlLoader.close(); + } catch (IOException ioe) { + // getLogger().warn("Cannot close classloader: ".concat(ioe.toString())); + } + } + } + + private URLClassLoader createClassLoader(FileCollection classpath) { + if (classpath == null) { + throw new InvalidUserDataException("Missing 'classesDirs' or 'classpath' property."); + } + + final Set cpElements = new LinkedHashSet<>(); + cpElements.addAll(classpath.getFiles()); + final URL[] urls = new URL[cpElements.size()]; + try { + int i = 0; + for (final File cpElement : cpElements) { + urls[i++] = cpElement.toURI().toURL(); + 
} + assert i == urls.length; + } catch (MalformedURLException mfue) { + throw new InvalidUserDataException("Failed to build classpath URLs.", mfue); + } + + return URLClassLoader.newInstance(urls, ClassLoader.getSystemClassLoader()); + } + + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java new file mode 100644 index 0000000000000..f8ab8eef1004c --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.packer; + +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.internal.ResolveAllDependencies; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginExtension; + +public class CacheTestFixtureResourcesPlugin implements Plugin { + + public static final String CACHE_TEST_FIXTURES = "cacheTestFixtures"; + + @Override + public void apply(Project project) { + + var cacheTestFixturesConfiguration = project.getConfigurations().create(CACHE_TEST_FIXTURES); + cacheTestFixturesConfiguration.defaultDependencies(deps -> { + DependencyHandler dependencyHandler = project.getDependencies(); + deps.add(dependencyHandler.create("org.reflections:reflections:" + VersionProperties.getVersions().get("reflections"))); + deps.add(dependencyHandler.create("org.javassist:javassist:" + VersionProperties.getVersions().get("javassist"))); + }); + project.getPlugins().withType(JavaPlugin.class, javaPlugin -> { + var cacheTestFixtures = project.getTasks().register(CACHE_TEST_FIXTURES, CacheCacheableTestFixtures.class, (t) -> { + var testSourceSet = project.getExtensions() + .getByType(JavaPluginExtension.class) + .getSourceSets() + .getByName(JavaPlugin.TEST_TASK_NAME); + t.getClasspath().from(cacheTestFixturesConfiguration); + t.getClasspath().from(testSourceSet.getRuntimeClasspath()); + }); + project.getTasks().withType(ResolveAllDependencies.class).configureEach(r -> r.dependsOn(cacheTestFixtures)); + }); + + } +} diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9763cef8aefeb..f0e599a9c0e87 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -47,6 +47,10 @@ dockerJava = 3.3.4 ductTape = 1.0.8 commonsCompress = 1.24.0 +# packer caching build logic +reflections = 
0.9.12 +javassist = 3.28.0-GA + # benchmark dependencies jmh = 1.26 diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index e8d94ce624dbb..f1965fc5400ea 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -37,6 +37,7 @@ junit5-vintage = { group = "org.junit.vintage", name="junit-vintage-engine", ver maven-model = "org.apache.maven:maven-model:3.6.2" mockito-core = "org.mockito:mockito-core:1.9.5" nebula-info = "com.netflix.nebula:gradle-info-plugin:11.3.3" +reflections = "org.reflections:reflections:0.9.12" shadow-plugin = "com.github.johnrengelman:shadow:8.1.1" spock-core = { group = "org.spockframework", name="spock-core", version.ref="spock" } spock-junit4 = { group = "org.spockframework", name="spock-junit4", version.ref="spock" } diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9d383c426cb74..7f672ece21f66 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -4092,6 +4092,11 @@ + + + + + diff --git a/test/fixtures/geoip-fixture/build.gradle b/test/fixtures/geoip-fixture/build.gradle index 377deb5e8a872..a7805c68b08e9 100644 --- a/test/fixtures/geoip-fixture/build.gradle +++ b/test/fixtures/geoip-fixture/build.gradle @@ -18,5 +18,3 @@ dependencies { exclude module: 'hamcrest-core' } } - -tasks.named("test") { enabled = false } diff --git a/test/fixtures/minio-fixture/Dockerfile b/test/fixtures/minio-fixture/Dockerfile deleted file mode 100644 index 6a94355951683..0000000000000 --- a/test/fixtures/minio-fixture/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM minio/minio:RELEASE.2021-03-01T04-20-55Z - -ARG bucket -ARG accessKey -ARG secretKey - -RUN mkdir -p /minio/data/${bucket} -ENV MINIO_ACCESS_KEY=${accessKey} -ENV MINIO_SECRET_KEY=${secretKey} diff --git a/test/fixtures/minio-fixture/build.gradle b/test/fixtures/minio-fixture/build.gradle index 8673c51d46038..66613809068f7 100644 --- a/test/fixtures/minio-fixture/build.gradle +++ 
b/test/fixtures/minio-fixture/build.gradle @@ -5,9 +5,9 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.test.fixtures' apply plugin: 'java' apply plugin: 'elasticsearch.java' +apply plugin: 'elasticsearch.cache-test-fixtures' description = 'Fixture for MinIO Storage service' @@ -23,10 +23,14 @@ dependencies { implementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" implementation "org.slf4j:slf4j-api:${versions.slf4j}" implementation "com.github.docker-java:docker-java-api:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-transport-zerodep:${versions.dockerJava}" runtimeOnly "com.github.docker-java:docker-java-transport:${versions.dockerJava}" runtimeOnly "com.github.docker-java:docker-java-core:${versions.dockerJava}" runtimeOnly "org.apache.commons:commons-compress:${versions.commonsCompress}" runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" - runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" + + // ensure we have proper logging during when used in tests + runtimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" + runtimeOnly "org.hamcrest:hamcrest:${versions.hamcrest}" } diff --git a/test/fixtures/minio-fixture/docker-compose.yml b/test/fixtures/minio-fixture/docker-compose.yml deleted file mode 100644 index c65ed2f070703..0000000000000 --- a/test/fixtures/minio-fixture/docker-compose.yml +++ /dev/null @@ -1,35 +0,0 @@ -version: '3' -services: - minio-fixture: - build: - context: . - args: - bucket: "bucket" - accessKey: "s3_test_access_key" - secretKey: "s3_test_secret_key" - dockerfile: Dockerfile - ports: - - "9000" - command: ["server", "/minio/data"] - minio-fixture-other: - build: - context: . 
- args: - bucket: "bucket" - accessKey: "s3_test_access_key" - secretKey: "s3_test_secret_key" - dockerfile: Dockerfile - ports: - - "9000" - command: ["server", "/minio/data"] - minio-fixture-repository-test-kit: - build: - context: . - args: - bucket: "bucket" - accessKey: "s3_test_access_key" - secretKey: "s3_test_secret_key" - dockerfile: Dockerfile - ports: - - "9000" - command: ["server", "/minio/data"] diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java new file mode 100644 index 0000000000000..e824cd612c1b4 --- /dev/null +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.fixtures; + +public interface CacheableTestFixture { + void cache(); +} diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java index fcb95890ace31..a7e6ba8d785a1 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java @@ -8,13 +8,15 @@ package org.elasticsearch.test.fixtures.minio; +import org.elasticsearch.test.fixtures.CacheableTestFixture; import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; import org.junit.rules.TestRule; import org.testcontainers.images.builder.ImageFromDockerfile; -public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer implements TestRule { +public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer implements TestRule, CacheableTestFixture { private static final int servicePort = 9000; + public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2021-03-01T04-20-55Z"; private final boolean enabled; public MinioTestContainer() { @@ -24,7 +26,7 @@ public MinioTestContainer() { public MinioTestContainer(boolean enabled) { super( new ImageFromDockerfile().withDockerfileFromBuilder( - builder -> builder.from("minio/minio:RELEASE.2021-03-01T04-20-55Z") + builder -> builder.from(DOCKER_BASE_IMAGE) .env("MINIO_ACCESS_KEY", "s3_test_access_key") .env("MINIO_SECRET_KEY", "s3_test_secret_key") .run("mkdir -p /minio/data/bucket") @@ -48,4 +50,13 @@ public void start() { public String getAddress() { return "http://127.0.0.1:" + getMappedPort(servicePort); } + + public void cache() { + try { + start(); + stop(); + } catch (RuntimeException e) { + logger().warn("Error while caching 
container images.", e); + } + } } diff --git a/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle b/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle index 860e42378dcd9..2d6b6e80d4bdf 100644 --- a/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle @@ -1,13 +1,17 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' -final Project fixture = project(':test:fixtures:minio-fixture') - dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('searchable-snapshots')))) + javaRestTestImplementation project(':test:test-clusters') + javaRestTestImplementation project(":test:framework") + javaRestTestImplementation project(':test:fixtures:minio-fixture') +} + +tasks.named('javaRestTest') { + usesDefaultDistribution() } restResources { @@ -15,32 +19,3 @@ restResources { include 'indices', 'search', 'bulk', 'snapshot', 'nodes', '_common', 'searchable_snapshots' } } - -testFixtures.useFixture(fixture.path, 'minio-fixture-other') -def fixtureAddress = { - int ephemeralPort = fixture.postProcessFixture.ext."test.fixtures.minio-fixture-other.tcp.9000" - assert ephemeralPort > 0 - '127.0.0.1:' + ephemeralPort -} - -tasks.named("javaRestTest").configure { - systemProperty 'test.minio.bucket', 'bucket' - systemProperty 'test.minio.base_path', 'searchable_snapshots_tests' -} - -testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - - keystore 's3.client.searchable_snapshots.access_key', 's3_test_access_key' - keystore 's3.client.searchable_snapshots.secret_key', 's3_test_secret_key' - setting 'xpack.license.self_generated.type', 'trial' - setting 's3.client.searchable_snapshots.protocol', 'http' - setting 
's3.client.searchable_snapshots.endpoint', { "${-> fixtureAddress()}" }, IGNORE_VALUE - - setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' - setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' - setting 'xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive', '0ms' - - setting 'xpack.security.enabled', 'false' -} - diff --git a/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java index eded3824d4f63..5c2b19fe75a07 100644 --- a/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java @@ -6,27 +6,54 @@ */ package org.elasticsearch.xpack.searchablesnapshots.minio; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.minio.MinioTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.xpack.searchablesnapshots.AbstractSearchableSnapshotsRestTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; - +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class MinioSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { + public static final 
MinioTestContainer minioFixture = new MinioTestContainer(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .keystore("s3.client.searchable_snapshots.access_key", "s3_test_access_key") + .keystore("s3.client.searchable_snapshots.secret_key", "s3_test_secret_key") + .setting("xpack.license.self_generated.type", "trial") + .setting("s3.client.searchable_snapshots.protocol", () -> "http") + .setting("s3.client.searchable_snapshots.endpoint", minioFixture::getAddress) + .setting("xpack.searchable.snapshot.shared_cache.size", "16MB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") + .setting("xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive", "0ms") + .setting("xpack.security.enabled", "false") + .setting("xpack.ml.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(minioFixture).around(cluster); + @Override protected String writeRepositoryType() { return "s3"; } @Override - protected Settings writeRepositorySettings() { - final String bucket = System.getProperty("test.minio.bucket"); - assertThat(bucket, not(blankOrNullString())); - - final String basePath = System.getProperty("test.minio.base_path"); - assertThat(basePath, not(blankOrNullString())); + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override + protected Settings writeRepositorySettings() { + final String bucket = "bucket"; + final String basePath = "searchable_snapshots_tests"; return Settings.builder().put("client", "searchable_snapshots").put("bucket", bucket).put("base_path", basePath).build(); } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index eafeb22106ff8..90a6f4ada32e0 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ 
-27,7 +27,7 @@ final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs') dependencies { javaRestTestImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit'))) - javaRestTestImplementation hdfsRepoPluginProject + javaRestTestImplementation project(':plugins:repository-hdfs') } restResources { diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle index 225e0146a6ecb..6e8993a7a141d 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle @@ -7,14 +7,12 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' -final Project fixture = project(':test:fixtures:minio-fixture') - dependencies { javaRestTestImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit'))) + javaRestTestImplementation project(':test:fixtures:minio-fixture') } restResources { @@ -23,26 +21,6 @@ restResources { } } -testFixtures.useFixture(fixture.path, 'minio-fixture-repository-test-kit') -def fixtureAddress = { - int ephemeralPort = fixture.postProcessFixture.ext."test.fixtures.minio-fixture-repository-test-kit.tcp.9000" - assert ephemeralPort > 0 - '127.0.0.1:' + ephemeralPort -} - -tasks.named("javaRestTest").configure { - systemProperty 'test.minio.bucket', 'bucket' - systemProperty 'test.minio.base_path', 'repository_test_kit_tests' -} - -testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - - keystore 's3.client.repository_test_kit.access_key', 's3_test_access_key' - keystore 's3.client.repository_test_kit.secret_key', 's3_test_secret_key' - setting 's3.client.repository_test_kit.protocol', 'http' - setting 
's3.client.repository_test_kit.endpoint', { "${-> fixtureAddress()}" }, IGNORE_VALUE - - setting 'xpack.security.enabled', 'false' +tasks.named('javaRestTest') { + usesDefaultDistribution() } - diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java index 16dc843690d05..3e58a8d89ff31 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java @@ -6,26 +6,50 @@ */ package org.elasticsearch.repositories.blobstore.testkit; -import org.elasticsearch.common.settings.Settings; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.minio.MinioTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class MinioSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestTestCase { + public static final MinioTestContainer minioFixture = new MinioTestContainer(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + 
.keystore("s3.client.repository_test_kit.access_key", "s3_test_access_key") + .keystore("s3.client.repository_test_kit.secret_key", "s3_test_secret_key") + .setting("s3.client.repository_test_kit.protocol", () -> "http") + .setting("s3.client.repository_test_kit.endpoint", minioFixture::getAddress) + .setting("xpack.security.enabled", "false") + // Additional tracing related to investigation into https://github.com/elastic/elasticsearch/issues/102294 + .setting("xpack.ml.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(minioFixture).around(cluster); + @Override protected String repositoryType() { return "s3"; } @Override - protected Settings repositorySettings() { - final String bucket = System.getProperty("test.minio.bucket"); - assertThat(bucket, not(blankOrNullString())); - - final String basePath = System.getProperty("test.minio.base_path"); - assertThat(basePath, not(blankOrNullString())); + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override + protected Settings repositorySettings() { + final String bucket = "bucket"; + final String basePath = "repository_test_kit_tests"; return Settings.builder().put("client", "repository_test_kit").put("bucket", bucket).put("base_path", basePath).build(); } } From 86b80a36c431031f880f43d279a38bd936fd2e51 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 5 Dec 2023 09:16:46 +0000 Subject: [PATCH 144/181] Enable retry on rate limit error from OpenAI(#102922) Extract token and request usage from the OpenAI headers --- .../http/retry/BaseResponseHandler.java | 2 +- .../openai/OpenAiResponseHandler.java | 42 ++++++++- .../openai/OpenAiResponseHandlerTests.java | 89 +++++++++++++++++-- 3 files changed, 124 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java index 31d987118c28d..4e36d6a59a5e6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -79,7 +79,7 @@ protected Exception buildError(String message, HttpRequestBase request, HttpResu ); } - static RestStatus toRestStatus(int statusCode) { + public static RestStatus toRestStatus(int statusCode) { RestStatus code = null; if (statusCode < 500) { code = RestStatus.fromCode(statusCode); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index 7609b734db4f5..43c234a6809c4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; @@ -22,6 +24,17 @@ import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; public class OpenAiResponseHandler extends BaseResponseHandler { + /** + * Rate limit headers taken from https://platform.openai.com/docs/guides/rate-limits/rate-limits-in-headers + */ + // The maximum number of requests that 
are permitted before exhausting the rate limit. + static final String REQUESTS_LIMIT = "x-ratelimit-limit-requests"; + // The maximum number of tokens that are permitted before exhausting the rate limit. + static final String TOKENS_LIMIT = "x-ratelimit-limit-tokens"; + // The remaining number of requests that are permitted before exhausting the rate limit. + static final String REMAINING_REQUESTS = "x-ratelimit-remaining-requests"; + // The remaining number of tokens that are permitted before exhausting the rate limit. + static final String REMAINING_TOKENS = "x-ratelimit-remaining-tokens"; public OpenAiResponseHandler(String requestType, CheckedFunction parseFunction) { super(requestType, parseFunction, OpenAiErrorResponseEntity::fromResponse); @@ -52,7 +65,7 @@ void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throw if (statusCode >= 500) { throw new RetryException(false, buildError(SERVER_ERROR, request, result)); } else if (statusCode == 429) { - throw new RetryException(false, buildError(RATE_LIMIT, request, result)); // TODO back off and retry + throw new RetryException(true, buildError(buildRateLimitErrorMessage(request, result), request, result)); } else if (statusCode == 401) { throw new RetryException(false, buildError(AUTHENTICATION, request, result)); } else if (statusCode >= 300 && statusCode < 400) { @@ -61,4 +74,31 @@ void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throw throw new RetryException(false, buildError(UNSUCCESSFUL, request, result)); } } + + static String buildRateLimitErrorMessage(HttpRequestBase request, HttpResult result) { + var response = result.response(); + int statusCode = result.response().getStatusLine().getStatusCode(); + var tokenLimit = getFirstHeaderOrUnknown(response, TOKENS_LIMIT); + var remainingTokens = getFirstHeaderOrUnknown(response, REMAINING_TOKENS); + var requestLimit = getFirstHeaderOrUnknown(response, REQUESTS_LIMIT); + var remainingRequests = 
getFirstHeaderOrUnknown(response, REMAINING_REQUESTS); + + var usageMessage = Strings.format( + "Token limit [%s], remaining tokens [%s]. Request limit [%s], remaining requests [%s]", + tokenLimit, + remainingTokens, + requestLimit, + remainingRequests + ); + + return RATE_LIMIT + ". " + usageMessage; + } + + private static String getFirstHeaderOrUnknown(HttpResponse response, String name) { + var header = response.getFirstHeader(name); + if (header != null && header.getElements().length > 0) { + return header.getElements()[0].getName(); + } + return "unknown"; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java index 56495b053e172..e3698701ac1f0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java @@ -7,9 +7,13 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.apache.http.Header; +import org.apache.http.HeaderElement; import org.apache.http.HttpResponse; +import org.apache.http.RequestLine; import org.apache.http.StatusLine; import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -18,6 +22,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -28,11 +33,12 @@ public void testCheckForFailureStatusCode() { var httpResponse = mock(HttpResponse.class); 
when(httpResponse.getStatusLine()).thenReturn(statusLine); + var header = mock(Header.class); + when(header.getElements()).thenReturn(new HeaderElement[] {}); + when(httpResponse.getFirstHeader(anyString())).thenReturn(header); var httpRequest = mock(HttpRequestBase.class); - var httpResult = new HttpResult(httpResponse, new byte[] {}); - var handler = new OpenAiResponseHandler("", result -> null); // 200 ok @@ -50,11 +56,8 @@ public void testCheckForFailureStatusCode() { // 429 when(statusLine.getStatusCode()).thenReturn(429); retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat( - retryException.getCause().getMessage(), - containsString("Received a rate limit status code for request [null] status [429]") - ); + assertTrue(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Received a rate limit status code. Token limit")); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); // 401 when(statusLine.getStatusCode()).thenReturn(401); @@ -81,4 +84,76 @@ public void testCheckForFailureStatusCode() { ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); } + + public void testBuildRateLimitErrorMessage() { + int statusCode = 429; + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(statusCode); + var requestLine = mock(RequestLine.class); + var response = mock(HttpResponse.class); + when(response.getStatusLine()).thenReturn(statusLine); + var request = mock(HttpRequestBase.class); + var httpResult = new HttpResult(response, new byte[] {}); + + { + when(response.getFirstHeader(OpenAiResponseHandler.REQUESTS_LIMIT)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REQUESTS_LIMIT, "3000") + ); + 
when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_REQUESTS, "2999") + ); + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn( + new BasicHeader(OpenAiResponseHandler.TOKENS_LIMIT, "10000") + ); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_TOKENS, "99800") + ); + + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [10000], remaining tokens [99800]. Request limit [3000], remaining requests [2999]") + ); + } + + { + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [unknown], remaining tokens [unknown]. Request limit [3000], remaining requests [2999]") + ); + } + + { + when(response.getFirstHeader(OpenAiResponseHandler.REQUESTS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_REQUESTS, "2999") + ); + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [unknown], remaining tokens [unknown]. 
Request limit [unknown], remaining requests [2999]") + ); + } + + { + when(response.getFirstHeader(OpenAiResponseHandler.REQUESTS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_REQUESTS, "2999") + ); + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn( + new BasicHeader(OpenAiResponseHandler.TOKENS_LIMIT, "10000") + ); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [10000], remaining tokens [unknown]. Request limit [unknown], remaining requests [2999]") + ); + } + } } From bbe9f115cc59bb7b7bfcc8d89cde1ab015427cc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Tue, 5 Dec 2023 10:18:49 +0100 Subject: [PATCH 145/181] More version removal/mitigation (#102867) --- .../upgrades/FullClusterRestartIT.java | 59 +++++++++++++------ ...rameterizedFullClusterRestartTestCase.java | 14 +++-- .../upgrades/QueryBuilderBWCIT.java | 16 +++-- .../metadata/DataStreamTestHelper.java | 4 +- .../test/rest/RestTestLegacyFeatures.java | 35 ++++++++++- .../xpack/restart/FullClusterRestartIT.java | 6 +- .../xpack/restart/FullClusterRestartIT.java | 48 ++++++++++----- ...MLModelDeploymentFullClusterRestartIT.java | 2 +- ...nfigIndexMappingsFullClusterRestartIT.java | 6 +- .../MlHiddenIndicesFullClusterRestartIT.java | 6 +- .../MlMigrationFullClusterRestartIT.java | 4 +- .../xpack/restart/WatcherMappingUpdateIT.java | 12 ++-- 12 files changed, 150 insertions(+), 62 deletions(-) diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index e5bc4a729f8b1..db2904a53dd11 100644 --- 
a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; +import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; import org.elasticsearch.client.Request; @@ -25,6 +26,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -268,7 +270,10 @@ public void testNewReplicas() throws Exception { } public void testSearchTimeSeriesMode() throws Exception { - assumeTrue("indexing time series indices changed in 8.2.0", getOldClusterVersion().onOrAfter(Version.V_8_2_0)); + + var originalClusterHasNewTimeSeriesIndexing = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_2_0)) + .orElse(true); + assumeTrue("indexing time series indices changed in 8.2.0", originalClusterHasNewTimeSeriesIndexing); int numDocs; if (isRunningAgainstOldCluster()) { numDocs = createTimeSeriesModeIndex(1); @@ -310,7 +315,9 @@ public void testSearchTimeSeriesMode() throws Exception { } public void testNewReplicasTimeSeriesMode() throws Exception { - assumeTrue("indexing time series indices changed in 8.2.0", getOldClusterVersion().onOrAfter(Version.V_8_2_0)); + var originalClusterHasNewTimeSeriesIndexing = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_2_0)) + .orElse(true); + assumeTrue("indexing time series indices changed in 8.2.0", originalClusterHasNewTimeSeriesIndexing); if 
(isRunningAgainstOldCluster()) { createTimeSeriesModeIndex(0); } else { @@ -995,7 +1002,7 @@ public void testSnapshotRestore() throws IOException { { templateBuilder.startObject("term"); { - templateBuilder.field("version", isRunningAgainstOldCluster() ? getOldClusterVersion() : Version.CURRENT); + templateBuilder.field("version", isRunningAgainstOldCluster() ? getOldClusterVersion() : Build.current().version()); } templateBuilder.endObject(); } @@ -1035,7 +1042,7 @@ public void testSnapshotRestore() throws IOException { checkSnapshot("old_snap", count, getOldClusterVersion(), getOldClusterIndexVersion()); if (false == isRunningAgainstOldCluster()) { - checkSnapshot("new_snap", count, Version.CURRENT, IndexVersion.current()); + checkSnapshot("new_snap", count, Build.current().version(), IndexVersion.current()); } } @@ -1158,7 +1165,12 @@ public void testClosedIndices() throws Exception { closeIndex(index); } - if (getOldClusterVersion().onOrAfter(Version.V_7_2_0)) { + @UpdateForV9 // This check can be removed (always assume true) + var originalClusterSupportsReplicationOfClosedIndices = parseLegacyVersion(getOldClusterVersion()).map( + v -> v.onOrAfter(Version.V_7_2_0) + ).orElse(true); + + if (originalClusterSupportsReplicationOfClosedIndices) { ensureGreenLongWait(index); assertClosedIndex(index, true); } else { @@ -1225,7 +1237,7 @@ private void assertClosedIndex(final String indexName, final boolean checkRoutin } @SuppressWarnings("unchecked") - private void checkSnapshot(String snapshotName, int count, Version tookOnVersion, IndexVersion tookOnIndexVersion) throws IOException { + private void checkSnapshot(String snapshotName, int count, String tookOnVersion, IndexVersion tookOnIndexVersion) throws IOException { // Check the snapshot metadata, especially the version Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName); Map snapResponse = entityAsMap(client().performRequest(listSnapshotRequest)); @@ -1235,7 +1247,7 @@ private void 
checkSnapshot(String snapshotName, int count, Version tookOnVersion // the format can change depending on the ES node version running & this test code running assertThat( XContentMapValues.extractValue("snapshots.version", snapResponse), - either(Matchers.equalTo(List.of(tookOnVersion.toString()))).or(equalTo(List.of(tookOnIndexVersion.toString()))) + either(Matchers.equalTo(List.of(tookOnVersion))).or(equalTo(List.of(tookOnIndexVersion.toString()))) ); // Remove the routing setting and template so we can test restoring them. @@ -1295,7 +1307,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion Map clusterSettingsResponse = entityAsMap(client().performRequest(clusterSettingsRequest)); @SuppressWarnings("unchecked") final Map persistentSettings = (Map) clusterSettingsResponse.get("persistent"); - assertThat(persistentSettings.get("cluster.routing.allocation.exclude.test_attr"), equalTo(getOldClusterVersion().toString())); + assertThat(persistentSettings.get("cluster.routing.allocation.exclude.test_attr"), equalTo(getOldClusterVersion())); // Check that the template was restored successfully Request getTemplateRequest = new Request("GET", "/_template/test_template"); @@ -1310,14 +1322,14 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion expectedTemplate.put("order", 0); Map aliases = new HashMap<>(); aliases.put("alias1", emptyMap()); - aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion.toString())))); + aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion)))); expectedTemplate.put("aliases", aliases); expectedTemplate = singletonMap("test_template", expectedTemplate); if (false == expectedTemplate.equals(getTemplateResponse)) { NotEqualMessageBuilder builder = new NotEqualMessageBuilder(); builder.compareMaps(getTemplateResponse, expectedTemplate); logger.info("expected: {}\nactual:{}", 
expectedTemplate, getTemplateResponse); - fail("template doesn't match:\n" + builder.toString()); + fail("template doesn't match:\n" + builder); } } @@ -1561,7 +1573,12 @@ public void testResize() throws Exception { @SuppressWarnings("unchecked") public void testSystemIndexMetadataIsUpgraded() throws Exception { - assumeTrue(".tasks became a system index in 7.10.0", getOldClusterVersion().onOrAfter(Version.V_7_10_0)); + + @UpdateForV9 // assumeTrue can be removed (condition always true) + var originalClusterTaskIndexIsSystemIndex = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_10_0)) + .orElse(true); + + assumeTrue(".tasks became a system index in 7.10.0", originalClusterTaskIndexIsSystemIndex); final String systemIndexWarning = "this request accesses system indices: [.tasks], but in a future major version, direct " + "access to system indices will be prevented by default"; if (isRunningAgainstOldCluster()) { @@ -1669,8 +1686,15 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception { } } + /** + * This test ensures that soft deletes are enabled a when upgrading a pre-8 cluster to 8.0+ + */ + @UpdateForV9 // This test can be removed in v9 public void testEnableSoftDeletesOnRestore() throws Exception { - assumeTrue("soft deletes must be enabled on 8.0+", getOldClusterVersion().before(Version.V_8_0_0)); + var originalClusterDidNotEnforceSoftDeletes = parseLegacyVersion(getOldClusterVersion()).map(v -> v.before(Version.V_8_0_0)) + .orElse(false); + + assumeTrue("soft deletes must be enabled on 8.0+", originalClusterDidNotEnforceSoftDeletes); final String snapshot = "snapshot-" + index; if (isRunningAgainstOldCluster()) { final Settings.Builder settings = indexSettings(1, 1); @@ -1783,16 +1807,15 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception { } /** - * In 7.14 the cluster.remote.*.transport.compress setting was change from a boolean to an enum setting + * In 7.14 the 
cluster.remote.*.transport.compress setting was changed from a boolean to an enum setting * with true/false as options. This test ensures that the old boolean setting in cluster state is * translated properly. This test can be removed in 9.0. */ + @UpdateForV9 public void testTransportCompressionSetting() throws IOException { - assumeTrue("the old transport.compress setting existed before 7.14", getOldClusterVersion().before(Version.V_7_14_0)); - assumeTrue( - "Early versions of 6.x do not have cluster.remote* prefixed settings", - getOldClusterVersion().onOrAfter(Version.fromString("6.8.0")) - ); + var originalClusterCompressSettingIsBoolean = parseLegacyVersion(getOldClusterVersion()).map(v -> v.before(Version.V_7_14_0)) + .orElse(false); + assumeTrue("the old transport.compress setting existed before 7.14", originalClusterCompressSettingIsBoolean); if (isRunningAgainstOldCluster()) { final Request putSettingsRequest = new Request("PUT", "/_cluster/settings"); try (XContentBuilder builder = jsonBuilder()) { diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java index eef8f62eedd98..05a2892717697 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; @@ -34,7 +35,7 @@ @TestCaseOrdering(FullClusterRestartTestOrdering.class) 
public abstract class ParameterizedFullClusterRestartTestCase extends ESRestTestCase { private static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString("7.17.0"); - private static final Version OLD_CLUSTER_VERSION = Version.fromString(System.getProperty("tests.old_cluster_version")); + private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); private static IndexVersion oldIndexVersion; private static boolean upgradeFailed = false; private static boolean upgraded = false; @@ -66,7 +67,8 @@ public void extractOldIndexVersion() throws Exception { version = IndexVersion.fromId(ix.intValue()); } else { // it doesn't have index version (pre 8.11) - just infer it from the release version - version = IndexVersion.fromId(getOldClusterVersion().id); + version = parseLegacyVersion(OLD_CLUSTER_VERSION).map(x -> IndexVersion.fromId(x.id())) + .orElse(IndexVersions.MINIMUM_COMPATIBLE); } if (indexVersion == null) { @@ -86,7 +88,7 @@ public void extractOldIndexVersion() throws Exception { public void maybeUpgrade() throws Exception { if (upgraded == false && requestedUpgradeStatus == UPGRADED) { try { - if (OLD_CLUSTER_VERSION.before(MINIMUM_WIRE_COMPATIBLE_VERSION)) { + if (getOldClusterTestVersion().before(MINIMUM_WIRE_COMPATIBLE_VERSION)) { // First upgrade to latest wire compatible version getUpgradeCluster().upgradeToVersion(MINIMUM_WIRE_COMPATIBLE_VERSION); } @@ -115,8 +117,8 @@ public boolean isRunningAgainstOldCluster() { return requestedUpgradeStatus == OLD; } - public static org.elasticsearch.Version getOldClusterVersion() { - return org.elasticsearch.Version.fromString(OLD_CLUSTER_VERSION.toString()); + public static String getOldClusterVersion() { + return OLD_CLUSTER_VERSION; } public static IndexVersion getOldClusterIndexVersion() { @@ -125,7 +127,7 @@ public static IndexVersion getOldClusterIndexVersion() { } public static Version getOldClusterTestVersion() { - return 
Version.fromString(OLD_CLUSTER_VERSION.toString()); + return Version.fromString(OLD_CLUSTER_VERSION); } protected abstract ElasticsearchCluster getUpgradeCluster(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index e2f70db0f69ba..3b58cf932fa61 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -21,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.DisMaxQueryBuilder; @@ -248,11 +250,17 @@ public void testQueryBuilderBWC() throws Exception { InputStream in = new ByteArrayInputStream(qbSource, 0, qbSource.length); StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry) ) { - Version clusterVersion = getOldClusterVersion(); - TransportVersion transportVersion; - if (clusterVersion.before(Version.V_8_8_0)) { - transportVersion = TransportVersion.fromId(clusterVersion.id); + @UpdateForV9 // always true + var originalClusterHasTransportVersion = parseLegacyVersion(getOldClusterVersion()).map( + v -> v.onOrAfter(Version.V_8_8_0) + ).orElse(true); + + final TransportVersion transportVersion; + if 
(originalClusterHasTransportVersion == false) { + transportVersion = TransportVersion.fromId( + parseLegacyVersion(getOldClusterVersion()).map(Version::id).orElse(TransportVersions.MINIMUM_COMPATIBLE.id()) + ); } else { transportVersion = TransportVersion.readVersion(input); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 9017e88f430b5..e3e11907534e2 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -120,9 +120,9 @@ public static String getLegacyDefaultBackingIndexName( String dataStreamName, long generation, long epochMillis, - Version minNodeVersion + boolean isNewIndexNameFormat ) { - if (minNodeVersion.onOrAfter(DATE_IN_BACKING_INDEX_VERSION)) { + if (isNewIndexNameFormat) { return String.format( Locale.ROOT, BACKING_INDEX_PREFIX + "%s-%s-%06d", diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index bd19757bac438..1530809a064b1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -9,6 +9,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.Version; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -23,10 +24,16 @@ */ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature ML_STATE_RESET_FALLBACK_ON_DISABLED = new NodeFeature("ml.state_reset_fallback_on_disabled"); + @UpdateForV9 public static final NodeFeature 
FEATURE_STATE_RESET_SUPPORTED = new NodeFeature("system_indices.feature_state_reset_supported"); public static final NodeFeature SYSTEM_INDICES_REST_ACCESS_ENFORCED = new NodeFeature("system_indices.rest_access_enforced"); + @UpdateForV9 + public static final NodeFeature SYSTEM_INDICES_REST_ACCESS_DEPRECATED = new NodeFeature("system_indices.rest_access_deprecated"); + @UpdateForV9 public static final NodeFeature HIDDEN_INDICES_SUPPORTED = new NodeFeature("indices.hidden_supported"); + @UpdateForV9 public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported"); + @UpdateForV9 public static final NodeFeature DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED = new NodeFeature( "indices.delete_template_multiple_names_supported" ); @@ -34,18 +41,44 @@ public class RestTestLegacyFeatures implements FeatureSpecification { // QA - rolling upgrade tests public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); + @UpdateForV9 + + public static final NodeFeature WATCHES_VERSION_IN_META = new NodeFeature("watcher.version_in_meta"); + @UpdateForV9 + public static final NodeFeature SECURITY_ROLE_DESCRIPTORS_OPTIONAL = new NodeFeature("security.role_descriptors_optional"); + @UpdateForV9 + public static final NodeFeature SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM = new NodeFeature( + "search.aggregations.force_interval_selection_on_date_histogram" + ); + @UpdateForV9 + public static final NodeFeature TRANSFORM_NEW_API_ENDPOINT = new NodeFeature("transform.new_api_endpoint"); + // Ref: https://github.com/elastic/elasticsearch/pull/65205 + @UpdateForV9 + public static final NodeFeature DATA_STREAMS_DATE_IN_INDEX_NAME = new NodeFeature("data-streams.date_in_index_name"); + @UpdateForV9 + public static final NodeFeature ML_INDICES_HIDDEN = new 
NodeFeature("ml.indices_hidden"); + @UpdateForV9 + public static final NodeFeature ML_ANALYTICS_MAPPINGS = new NodeFeature("ml.analytics_mappings"); @Override public Map getHistoricalFeatures() { return Map.ofEntries( entry(FEATURE_STATE_RESET_SUPPORTED, Version.V_7_13_0), entry(SYSTEM_INDICES_REST_ACCESS_ENFORCED, Version.V_8_0_0), + entry(SYSTEM_INDICES_REST_ACCESS_DEPRECATED, Version.V_7_10_0), entry(HIDDEN_INDICES_SUPPORTED, Version.V_7_7_0), entry(COMPONENT_TEMPLATE_SUPPORTED, Version.V_7_8_0), entry(DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED, Version.V_7_13_0), entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0), entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), - entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0) + entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), + entry(WATCHES_VERSION_IN_META, Version.V_7_13_0), + entry(SECURITY_ROLE_DESCRIPTORS_OPTIONAL, Version.V_7_3_0), + entry(SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM, Version.V_7_2_0), + entry(TRANSFORM_NEW_API_ENDPOINT, Version.V_7_5_0), + entry(DATA_STREAMS_DATE_IN_INDEX_NAME, Version.V_7_11_0), + entry(ML_INDICES_HIDDEN, Version.V_7_7_0), + entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0) ); } } diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 07ed594770649..7a90907b9cf39 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import 
org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -88,7 +89,10 @@ protected Settings restClientSettings() { @BeforeClass public static void checkClusterVersion() { - assumeTrue("no shutdown in versions before " + Version.V_7_15_0, getOldClusterVersion().onOrAfter(Version.V_7_15_0)); + @UpdateForV9 // always true + var originalClusterSupportsShutdown = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_15_0)) + .orElse(true); + assumeTrue("no shutdown in versions before 7.15", originalClusterSupportsShutdown); } @SuppressWarnings("unchecked") diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index ee75052ae1da7..2ad66f071d784 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -23,10 +23,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; @@ -289,7 +291,11 @@ public void testWatcherWithApiKey() throws 
Exception { } public void testServiceAccountApiKey() throws IOException { - assumeTrue("no service accounts in versions before " + Version.V_7_13_0, getOldClusterVersion().onOrAfter(Version.V_7_13_0)); + @UpdateForV9 + var originalClusterSupportsServiceAccounts = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_13_0)) + .orElse(true); + assumeTrue("no service accounts in versions before 7.13", originalClusterSupportsServiceAccounts); + if (isRunningAgainstOldCluster()) { final Request createServiceTokenRequest = new Request("POST", "/_security/service/elastic/fleet-server/credential/token"); final Response createServiceTokenResponse = client().performRequest(createServiceTokenRequest); @@ -352,7 +358,7 @@ public void testApiKeySuperuser() throws IOException { ) ) ); - if (getOldClusterVersion().onOrAfter(Version.V_7_3_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SECURITY_ROLE_DESCRIPTORS_OPTIONAL)) { createApiKeyRequest.setJsonEntity(""" { "name": "super_legacy_key" @@ -385,13 +391,13 @@ public void testApiKeySuperuser() throws IOException { saveApiKeyRequest.setJsonEntity("{\"auth_header\":\"" + apiKeyAuthHeader + "\"}"); assertOK(client().performRequest(saveApiKeyRequest)); - if (getOldClusterVersion().before(Version.V_8_0_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) { final Request indexRequest = new Request("POST", ".security/_doc"); indexRequest.setJsonEntity(""" { "doc_type": "foo" }"""); - if (getOldClusterVersion().onOrAfter(Version.V_7_10_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_DEPRECATED)) { indexRequest.setOptions(systemIndexWarningHandlerOptions(".security-7").addHeader("Authorization", apiKeyAuthHeader)); } else { indexRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", apiKeyAuthHeader)); @@ -447,7 +453,7 @@ public void testRollupAfterRestart() throws Exception { final Request 
createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test"); String intervalType; - if (getOldClusterVersion().onOrAfter(Version.V_7_2_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM)) { intervalType = "fixed_interval"; } else { intervalType = "interval"; @@ -497,7 +503,11 @@ public void testRollupAfterRestart() throws Exception { } public void testTransformLegacyTemplateCleanup() throws Exception { - assumeTrue("Before 7.2 transforms didn't exist", getOldClusterVersion().onOrAfter(Version.V_7_2_0)); + @UpdateForV9 + var originalClusterSupportsTransform = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_2_0)) + .orElse(true); + assumeTrue("Before 7.2 transforms didn't exist", originalClusterSupportsTransform); + if (isRunningAgainstOldCluster()) { // create the source index @@ -520,7 +530,7 @@ public void testTransformLegacyTemplateCleanup() throws Exception { assertThat(createIndexResponse.get("acknowledged"), equalTo(Boolean.TRUE)); // create a transform - String endpoint = getOldClusterVersion().onOrAfter(Version.V_7_5_0) + String endpoint = clusterHasFeature(RestTestLegacyFeatures.TRANSFORM_NEW_API_ENDPOINT) ? 
"_transform/transform-full-cluster-restart-test" : "_data_frame/transforms/transform-full-cluster-restart-test"; final Request createTransformRequest = new Request("PUT", endpoint); @@ -584,7 +594,7 @@ public void testSlmPolicyAndStats() throws IOException { Collections.singletonMap("indices", Collections.singletonList("*")), null ); - if (isRunningAgainstOldCluster() && getOldClusterVersion().onOrAfter(Version.V_7_4_0)) { + if (isRunningAgainstOldCluster() && has(ProductFeature.SLM)) { Request createRepoRequest = new Request("PUT", "_snapshot/test-repo"); String repoCreateJson = "{" + " \"type\": \"fs\"," + " \"settings\": {" + " \"location\": \"test-repo\"" + " }" + "}"; createRepoRequest.setJsonEntity(repoCreateJson); @@ -598,7 +608,7 @@ public void testSlmPolicyAndStats() throws IOException { client().performRequest(createSlmPolicyRequest); } - if (isRunningAgainstOldCluster() == false && getOldClusterVersion().onOrAfter(Version.V_7_4_0)) { + if (isRunningAgainstOldCluster() == false && has(ProductFeature.SLM)) { Request getSlmPolicyRequest = new Request("GET", "_slm/policy/test-policy"); Response response = client().performRequest(getSlmPolicyRequest); Map responseMap = entityAsMap(response); @@ -749,11 +759,7 @@ private void waitForHits(String indexName, int expectedHits) throws Exception { Map hits = (Map) response.get("hits"); logger.info("Hits are: {}", hits); Integer total; - if (getOldClusterVersion().onOrAfter(Version.V_7_0_0) || isRunningAgainstOldCluster() == false) { - total = (Integer) ((Map) hits.get("total")).get("value"); - } else { - total = (Integer) hits.get("total"); - } + total = (Integer) ((Map) hits.get("total")).get("value"); assertThat(total, greaterThanOrEqualTo(expectedHits)); } catch (IOException ioe) { if (ioe instanceof ResponseException) { @@ -929,7 +935,12 @@ private void waitForRollUpJob(final String rollupJob, final Matcher expectedS @SuppressWarnings("unchecked") public void testDataStreams() throws Exception { - assumeTrue("no 
data streams in versions before " + Version.V_7_9_0, getOldClusterVersion().onOrAfter(Version.V_7_9_0)); + + @UpdateForV9 + var originalClusterSupportsDataStreams = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_9_0)) + .orElse(true); + + assumeTrue("no data streams in versions before 7.9.0", originalClusterSupportsDataStreams); if (isRunningAgainstOldCluster()) { createComposableTemplate(client(), "dst", "ds"); @@ -966,7 +977,12 @@ public void testDataStreams() throws Exception { assertEquals("ds", ds.get("name")); assertEquals(1, indices.size()); assertEquals( - DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp, getOldClusterVersion()), + DataStreamTestHelper.getLegacyDefaultBackingIndexName( + "ds", + 1, + timestamp, + clusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_DATE_IN_INDEX_NAME) + ), indices.get(0).get("index_name") ); assertNumHits("ds", 1, 1); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index f67d1e4c37b28..b2594eaf02ea4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -92,7 +92,7 @@ protected Settings restClientSettings() { public void testDeploymentSurvivesRestart() throws Exception { @UpdateForV9 // upgrade will always be from v8, condition can be removed - var originalClusterAtLeastV8 = getOldClusterVersion().onOrAfter(Version.V_8_0_0); + var originalClusterAtLeastV8 = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_0_0)).orElse(true); // These tests assume the original cluster is v8 - testing for features on the 
_current_ cluster will break for NEW assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java index e4ab3957f2627..3674f811ebb0a 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java @@ -8,13 +8,13 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.test.rest.IndexMappingTemplateAsserter; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; @@ -53,7 +53,7 @@ public void waitForMlTemplates() throws Exception { XPackRestTestHelper.waitForTemplates( client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - getOldClusterVersion().onOrAfter(Version.V_7_8_0) + clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) ); } } @@ -62,7 +62,7 @@ public void testMlConfigIndexMappingsAfterMigration() throws Exception { if (isRunningAgainstOldCluster()) { // trigger .ml-config index creation createAnomalyDetectorJob(OLD_CLUSTER_JOB_ID); - if (getOldClusterVersion().onOrAfter(Version.V_7_3_0)) { + if 
(clusterHasFeature(RestTestLegacyFeatures.ML_ANALYTICS_MAPPINGS)) { // .ml-config has mappings for analytics as the feature was introduced in 7.3.0 assertThat(getDataFrameAnalysisMappings().keySet(), hasItem("outlier_detection")); } else { diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java index aeb3dad547946..16345a19fc950 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java @@ -8,7 +8,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -18,6 +17,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -68,7 +68,7 @@ public void waitForMlTemplates() throws Exception { XPackRestTestHelper.waitForTemplates( client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - getOldClusterVersion().onOrAfter(Version.V_7_8_0) + clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) ); } } @@ -79,7 +79,7 @@ public void testMlIndicesBecomeHidden() throws Exception { createAnomalyDetectorJob(JOB_ID); openAnomalyDetectorJob(JOB_ID); - if (getOldClusterVersion().before(Version.V_7_7_0)) { + if 
(clusterHasFeature(RestTestLegacyFeatures.ML_INDICES_HIDDEN) == false) { Map indexSettingsMap = contentAsMap(getMlIndicesSettings()); Map aliasesMap = contentAsMap(getMlAliases()); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 2bbda9123ae34..0b15e98f201a0 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -8,7 +8,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; @@ -19,6 +18,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -61,7 +61,7 @@ public void waitForMlTemplates() throws Exception { XPackRestTestHelper.waitForTemplates( client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - getOldClusterVersion().onOrAfter(Version.V_7_8_0) + clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) ); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java index 
1f0e245691b57..fb7c22845b788 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java @@ -11,12 +11,13 @@ import org.apache.http.util.EntityUtils; import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import java.nio.charset.StandardCharsets; @@ -64,18 +65,18 @@ public void testMappingsAreUpdated() throws Exception { """); client().performRequest(putWatchRequest); - if (getOldClusterVersion().onOrAfter(Version.V_7_13_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.WATCHES_VERSION_IN_META)) { assertMappingVersion(".watches", getOldClusterVersion()); } else { // watches indices from before 7.10 do not have mapping versions in _meta assertNoMappingVersion(".watches"); } } else { - assertMappingVersion(".watches", Version.CURRENT); + assertMappingVersion(".watches", Build.current().version()); } } - private void assertMappingVersion(String index, Version clusterVersion) throws Exception { + private void assertMappingVersion(String index, String clusterVersion) throws Exception { assertBusy(() -> { Request mappingRequest = new Request("GET", index + "/_mappings"); mappingRequest.setOptions(getWarningHandlerOptions(index)); @@ -88,7 +89,8 @@ private void assertMappingVersion(String index, Version clusterVersion) throws E private void assertNoMappingVersion(String index) throws Exception { assertBusy(() -> { Request mappingRequest = new Request("GET", 
index + "/_mappings"); - if (isRunningAgainstOldCluster() == false || getOldClusterVersion().onOrAfter(Version.V_7_10_0)) { + assert isRunningAgainstOldCluster(); + if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_DEPRECATED)) { mappingRequest.setOptions(getWarningHandlerOptions(index)); } Response response = client().performRequest(mappingRequest); From 8e332b65e9df183c5c0ee631528fd2f11523a398 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 5 Dec 2023 10:23:30 +0100 Subject: [PATCH 146/181] [Enterprise Search] Add update sync job error endpoint (#102913) Add update sync job error endpoint. --- .../api/connector_sync_job.error.json | 39 +++++ .../450_connector_sync_job_error.yml | 40 +++++ .../xpack/application/EnterpriseSearch.java | 5 + .../connector/syncjob/ConnectorSyncJob.java | 2 +- .../syncjob/ConnectorSyncJobIndexService.java | 37 +++++ ...RestUpdateConnectorSyncJobErrorAction.java | 52 +++++++ ...portUpdateConnectorSyncJobErrorAction.java | 53 +++++++ .../UpdateConnectorSyncJobErrorAction.java | 145 ++++++++++++++++++ .../ConnectorSyncJobIndexServiceTests.java | 55 +++++++ .../syncjob/ConnectorSyncJobTestUtils.java | 5 + ...pdateConnectorSyncJobErrorActionTests.java | 74 +++++++++ ...rorActionRequestBWCSerializationTests.java | 53 +++++++ ...pdateConnectorSyncJobErrorActionTests.java | 47 ++++++ .../xpack/security/operator/Constants.java | 7 +- 14 files changed, 610 insertions(+), 4 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json new file mode 100644 index 0000000000000..5db061eda6e48 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json @@ -0,0 +1,39 @@ +{ + "connector_sync_job.error": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Sets an error for a connector sync job." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}/_error", + "methods": [ + "PUT" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to set an error for." 
+ } + } + } + ] + }, + "body": { + "description": "The error to set in the connector sync job.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml new file mode 100644 index 0000000000000..1ba3cf1c50b7c --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml @@ -0,0 +1,40 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +"Set an error for a connector sync job": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.error: + connector_sync_job_id: $id + body: + error: error + + - match: { acknowledged: true } + + +--- +"Set an error for a Connector Sync Job - Connector Sync Job does not exist": + - do: + connector_sync_job.error: + connector_sync_job_id: test-nonexistent-connector-sync-job-id + body: + error: error + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index c4dbee214f37a..ce9bbfa4d6a4b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -85,13 +85,16 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import 
org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobIngestionStatsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobIngestionStatsAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; @@ -227,6 +230,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class), + new ActionHandler<>(UpdateConnectorSyncJobErrorAction.INSTANCE, 
TransportUpdateConnectorSyncJobErrorAction.class), new ActionHandler<>( UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, TransportUpdateConnectorSyncJobIngestionStatsAction.class @@ -300,6 +304,7 @@ public List getRestHandlers( new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), new RestCheckInConnectorSyncJobAction(), + new RestUpdateConnectorSyncJobErrorAction(), new RestUpdateConnectorSyncJobIngestionStatsAction() ) ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 0781bb515fe93..211a423dab99e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -77,7 +77,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { public static final ParseField DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); - static final ParseField ERROR_FIELD = new ParseField("error"); + public static final ParseField ERROR_FIELD = new ParseField("error"); public static final ParseField ID_FIELD = new ParseField("id"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index f105e6ece72aa..9bcd03eb21ca9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -356,6 +356,43 @@ public void 
onFailure(Exception e) { } } + /** + * Sets the error for the {@link ConnectorSyncJob} in the underlying index. + * This also sets the {@link ConnectorSyncStatus} to 'ERROR'. + * + * @param connectorSyncJobId The id of the connector sync job object. + * @param error The error to set. + * @param listener The action listener to invoke on response/failure. + */ + public void updateConnectorSyncJobError(String connectorSyncJobId, String error, ActionListener listener) { + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ) + .doc( + Map.of( + ConnectorSyncJob.ERROR_FIELD.getPreferredName(), + error, + ConnectorSyncJob.STATUS_FIELD.getPreferredName(), + ConnectorSyncStatus.ERROR + ) + ); + + try { + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Listeners that checks failures for IndexNotFoundException and DocumentMissingException, * and transforms them in ResourceNotFoundException, invoking onFailure on the delegate listener. diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java new file mode 100644 index 0000000000000..e19a9675beebb --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; + +public class RestUpdateConnectorSyncJobErrorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_sync_job_update_error_action"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.PUT, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}/_error" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + UpdateConnectorSyncJobErrorAction.Request request = UpdateConnectorSyncJobErrorAction.Request.fromXContentBytes( + restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM), + restRequest.content(), + restRequest.getXContentType() + ); + + return restChannel -> client.execute( + UpdateConnectorSyncJobErrorAction.INSTANCE, + request, + new RestToXContentListener<>(restChannel) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java new file mode 100644 index 
0000000000000..c814092f2e7a2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportUpdateConnectorSyncJobErrorAction extends HandledTransportAction< + UpdateConnectorSyncJobErrorAction.Request, + AcknowledgedResponse> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportUpdateConnectorSyncJobErrorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorSyncJobErrorAction.NAME, + transportService, + actionFilters, + UpdateConnectorSyncJobErrorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute(Task task, UpdateConnectorSyncJobErrorAction.Request request, 
ActionListener listener) { + connectorSyncJobIndexService.updateConnectorSyncJobError( + request.getConnectorSyncJobId(), + request.getError(), + listener.map(r -> AcknowledgedResponse.TRUE) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java new file mode 100644 index 0000000000000..820630bccee03 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import 
org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class UpdateConnectorSyncJobErrorAction extends ActionType { + + public static final UpdateConnectorSyncJobErrorAction INSTANCE = new UpdateConnectorSyncJobErrorAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_error"; + public static final String ERROR_EMPTY_MESSAGE = "[error] of the connector sync job cannot be null or empty"; + + private UpdateConnectorSyncJobErrorAction() { + super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends ActionRequest implements ToXContentObject { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_sync_job_error_request", + false, + ((args, connectorSyncJobId) -> new Request(connectorSyncJobId, (String) args[0])) + ); + + static { + PARSER.declareString(constructorArg(), ConnectorSyncJob.ERROR_FIELD); + } + + private final String connectorSyncJobId; + + private final String error; + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorSyncJobId = in.readString(); + this.error = in.readString(); + } + + public Request(String connectorSyncJobId, String error) { + this.connectorSyncJobId = connectorSyncJobId; + this.error = error; + } + + public static Request fromXContentBytes(String connectorSyncJobId, BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorSyncJobErrorAction.Request.fromXContent(parser, connectorSyncJobId); + } catch (IOException e) { + throw 
new ElasticsearchParseException("Failed to parse: " + source.utf8ToString()); + } + } + + public static UpdateConnectorSyncJobErrorAction.Request fromXContent(XContentParser parser, String connectorSyncJobId) + throws IOException { + return PARSER.parse(parser, connectorSyncJobId); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError( + ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, + validationException + ); + } + + if (Strings.isNullOrEmpty(error)) { + validationException = addValidationError(ERROR_EMPTY_MESSAGE, validationException); + } + + return validationException; + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + public String getError() { + return error; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + out.writeString(error); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId) && Objects.equals(error, request.error); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJobId, error); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(ConnectorSyncJob.ERROR_FIELD.getPreferredName(), error); + } + builder.endObject(); + return builder; + } + + public static UpdateConnectorSyncJobErrorAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 2dcf43c6f3f22..ab16fb8a46eb0 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.junit.Before; @@ -253,6 +254,35 @@ public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testUpdateConnectorSyncJobError() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + String errorInRequest = request.getError(); + + UpdateResponse updateResponse = awaitUpdateConnectorSyncJob(syncJobId, errorInRequest); + Map connectorSyncJobSource = getConnectorSyncJobSourceById(syncJobId); 
+ String error = (String) connectorSyncJobSource.get(ConnectorSyncJob.ERROR_FIELD.getPreferredName()); + ConnectorSyncStatus syncStatus = ConnectorSyncStatus.fromString( + (String) connectorSyncJobSource.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertThat(error, equalTo(errorInRequest)); + assertThat(syncStatus, equalTo(ConnectorSyncStatus.ERROR)); + } + + public void testUpdateConnectorSyncJobError_WithMissingSyncJobId_ExceptException() { + expectThrows( + ResourceNotFoundException.class, + () -> awaitUpdateConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID, randomAlphaOfLengthBetween(5, 100)) + ); + } + public void testUpdateConnectorSyncJobIngestionStats() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connector.getConnectorId() @@ -435,6 +465,31 @@ private static void assertFieldsDidNotUpdateExceptFieldList( } } + private UpdateResponse awaitUpdateConnectorSyncJob(String syncJobId, String error) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorSyncJobIndexService.updateConnectorSyncJobError(syncJobId, error, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse updateResponse) { + resp.set(updateResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitCancelConnectorSyncJob(String syncJobId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference 
resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 8170391094356..fb412db168605 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import java.time.Instant; @@ -104,6 +105,10 @@ public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyn return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } + public static UpdateConnectorSyncJobErrorAction.Request getRandomUpdateConnectorSyncJobErrorActionRequest() { + return new UpdateConnectorSyncJobErrorAction.Request(randomAlphaOfLength(10), randomAlphaOfLengthBetween(5, 100)); + } + public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdateConnectorSyncJobIngestionStatsActionRequest() { Instant lowerBoundInstant = Instant.ofEpochSecond(0L); Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java new file mode 100644 index 0000000000000..fd974d5fd21f7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportUpdateConnectorSyncJobErrorActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportUpdateConnectorSyncJobErrorAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + 
Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportUpdateConnectorSyncJobErrorAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testUpdateConnectorSyncJobError_ExpectNoWarnings() throws InterruptedException { + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(UpdateConnectorSyncJobErrorAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for update request", requestTimedOut); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java new file mode 100644 index 0000000000000..a6c52d8cbf62c --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests extends AbstractBWCSerializationTestCase< + UpdateConnectorSyncJobErrorAction.Request> { + + private String connectorSyncJobId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorSyncJobErrorAction.Request::new; + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request createTestInstance() { + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + this.connectorSyncJobId = request.getConnectorSyncJobId(); + return request; + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request mutateInstance(UpdateConnectorSyncJobErrorAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorSyncJobErrorAction.Request.fromXContent(parser, this.connectorSyncJobId); + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request mutateInstanceForVersion( + UpdateConnectorSyncJobErrorAction.Request instance, + TransportVersion version + ) { + return new UpdateConnectorSyncJobErrorAction.Request(instance.getConnectorSyncJobId(), instance.getError()); + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java new file mode 100644 index 0000000000000..0899aa3b599df --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class UpdateConnectorSyncJobErrorActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSyncJobIdAndErrorArePresent_ExpectNoValidationError() { + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExceptionValidationError() { + UpdateConnectorSyncJobErrorAction.Request request = new UpdateConnectorSyncJobErrorAction.Request( + "", + randomAlphaOfLengthBetween(10, 100) + ); + ActionRequestValidationException 
exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + + public void testValidate_WhenErrorIsEmpty_ExceptionValidationError() { + UpdateConnectorSyncJobErrorAction.Request request = new UpdateConnectorSyncJobErrorAction.Request(randomAlphaOfLength(10), ""); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(UpdateConnectorSyncJobErrorAction.ERROR_EMPTY_MESSAGE)); + } + +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 11e293d8675f7..5c4fd44d77c9b 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -134,11 +134,12 @@ public class Constants { "cluster:admin/xpack/connector/update_last_sync_stats", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", - "cluster:admin/xpack/connector/sync_job/post", - "cluster:admin/xpack/connector/sync_job/delete", + "cluster:admin/xpack/connector/sync_job/cancel", "cluster:admin/xpack/connector/sync_job/check_in", + "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/get", - "cluster:admin/xpack/connector/sync_job/cancel", + "cluster:admin/xpack/connector/sync_job/post", + "cluster:admin/xpack/connector/sync_job/update_error", "cluster:admin/xpack/connector/sync_job/update_stats", "cluster:admin/xpack/deprecation/info", 
"cluster:admin/xpack/deprecation/nodes/info", From 2690fe3cf35e4432f33a0d8c6f0c9d8e89417dc3 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Tue, 5 Dec 2023 11:27:39 +0200 Subject: [PATCH 147/181] Report DSL error entry information to the health node (#102635) Add support to the DataStreamLifecycleService to report information regarding the top 500 (configurable) error entries, sorted by retry count, to the health node. After every DSL loop (every 5 minutes by default) the error entries information is reported to the health node. We omit the error message in the entries as that can be long and we will not display it in the health API as we will recommend checking the explain API to proceed further towards green health. --- .../DataStreamLifecycleServiceIT.java | 83 ++++++- .../src/main/java/module-info.java | 2 + .../datastreams/DataStreamFeatures.java | 28 +++ .../datastreams/DataStreamsPlugin.java | 16 +- .../DataStreamLifecycleErrorStore.java | 30 +++ .../lifecycle/DataStreamLifecycleService.java | 25 +- ...ataStreamLifecycleHealthInfoPublisher.java | 116 ++++++++++ ...lasticsearch.features.FeatureSpecification | 9 + .../DataStreamLifecycleErrorStoreTests.java | 31 +++ .../DataStreamLifecycleServiceTests.java | 27 ++- ...plainDataStreamLifecycleResponseTests.java | 6 +- ...reamLifecycleHealthInfoPublisherTests.java | 214 ++++++++++++++++++ ...sAvailabilityHealthIndicatorServiceIT.java | 6 +- .../org/elasticsearch/TransportVersions.java | 2 + .../datastreams/lifecycle/ErrorEntry.java | 11 +- .../node/DataStreamLifecycleHealthInfo.java | 34 +++ .../health/node/DslErrorInfo.java | 34 +++ .../elasticsearch/health/node/HealthInfo.java | 22 +- .../health/node/HealthInfoCache.java | 19 +- .../node/UpdateHealthInfoCacheAction.java | 64 +++++- .../health/HealthServiceTests.java | 3 +- .../node/DiskHealthIndicatorServiceTests.java | 4 +- .../node/FetchHealthInfoCacheActionTests.java | 15 +- .../health/node/HealthInfoCacheTests.java | 24 +- 
.../health/node/HealthInfoTests.java | 14 +- 25 files changed, 795 insertions(+), 44 deletions(-) create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java create mode 100644 modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification create mode 100644 modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java create mode 100644 server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java create mode 100644 server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 7ac86c8aee614..d3eaee36f67f7 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -46,6 +46,9 @@ import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; +import org.elasticsearch.health.node.DslErrorInfo; +import org.elasticsearch.health.node.FetchHealthInfoCacheAction; import org.elasticsearch.index.Index; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -63,6 +66,7 @@ import 
java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; @@ -77,7 +81,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; @@ -95,6 +101,8 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); settings.put(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL, "1s"); settings.put(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING.getKey(), "min_docs=1,max_docs=1"); + // we'll test DSL errors reach the health node, so we're lowering the threshold over which we report errors + settings.put(DataStreamLifecycleService.DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING.getKey(), "3"); return settings.build(); } @@ -394,8 +402,8 @@ public void testErrorRecordingOnRollover() throws Exception { indexDocs(dataStreamName, 1); + String writeIndexName = getBackingIndices(dataStreamName).get(1); assertBusy(() -> { - String writeIndexName = getBackingIndices(dataStreamName).get(1); ErrorEntry writeIndexRolloverError = null; Iterable lifecycleServices = internalCluster().getInstances(DataStreamLifecycleService.class); @@ -408,6 +416,35 @@ public void testErrorRecordingOnRollover() throws Exception { assertThat(writeIndexRolloverError, is(notNullValue())); assertThat(writeIndexRolloverError.error(), containsString("maximum normal shards open")); + + 
ExplainDataStreamLifecycleAction.Request explainRequest = new ExplainDataStreamLifecycleAction.Request( + new String[] { dataStreamName } + ); + ExplainDataStreamLifecycleAction.Response response = client().execute(ExplainDataStreamLifecycleAction.INSTANCE, explainRequest) + .actionGet(); + boolean found = false; + for (ExplainIndexDataStreamLifecycle index : response.getIndices()) { + if (index.getError() != null && index.getError().retryCount() > 3) { + found = true; + break; + } + } + assertTrue(found); + }, 30, TimeUnit.SECONDS); + + // DSL should signal to the health node that there's an error in the store that's been retried at least 3 times + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(not(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS))); + assertThat(dslHealthInfoOnHealthNode.dslErrorsInfo().size(), is(1)); + DslErrorInfo errorInfo = dslHealthInfoOnHealthNode.dslErrorsInfo().get(0); + + assertThat(errorInfo.indexName(), is(writeIndexName)); + assertThat(errorInfo.retryCount(), greaterThanOrEqualTo(3)); }); // let's reset the cluster max shards per node limit to allow rollover to proceed and check the error store is empty @@ -429,6 +466,16 @@ public void testErrorRecordingOnRollover() throws Exception { assertThat(lifecycleService.getErrorStore().getError(previousWriteInddex), nullValue()); } }); + + // the error has been fixed so the health information shouldn't be reported anymore + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = 
healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS)); + }); } public void testErrorRecordingOnRetention() throws Exception { @@ -470,7 +517,9 @@ public void testErrorRecordingOnRetention() throws Exception { assertThat(writeIndex, backingIndexEqualTo(dataStreamName, 2)); }); - String firstGenerationIndex = getBackingIndices(dataStreamName).get(0); + List dsBackingIndices = getBackingIndices(dataStreamName); + String firstGenerationIndex = dsBackingIndices.get(0); + String secondGenerationIndex = dsBackingIndices.get(1); // mark the first generation index as read-only so deletion fails when we enable the retention configuration updateIndexSettings(Settings.builder().put(READ_ONLY.settingName(), true), firstGenerationIndex); @@ -493,7 +542,7 @@ public void testErrorRecordingOnRetention() throws Exception { for (DataStreamLifecycleService lifecycleService : lifecycleServices) { recordedRetentionExecutionError = lifecycleService.getErrorStore().getError(firstGenerationIndex); - if (recordedRetentionExecutionError != null) { + if (recordedRetentionExecutionError != null && recordedRetentionExecutionError.retryCount() > 3) { break; } } @@ -502,6 +551,24 @@ public void testErrorRecordingOnRetention() throws Exception { assertThat(recordedRetentionExecutionError.error(), containsString("blocked by: [FORBIDDEN/5/index read-only (api)")); }); + // DSL should signal to the health node that there's an error in the store that's been retried at least 3 times + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(not(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS))); + // perhaps surprisingly rollover and 
delete are error-ing due to the read_only block on the first generation + // index which prevents metadata updates so rolling over the data stream is also blocked (note that both indices error at + // the same time so they'll have an equal retry count - the order becomes of the results, usually ordered by retry count, + // becomes non deterministic, hence the dynamic matching of index name) + assertThat(dslHealthInfoOnHealthNode.dslErrorsInfo().size(), is(2)); + DslErrorInfo errorInfo = dslHealthInfoOnHealthNode.dslErrorsInfo().get(0); + assertThat(errorInfo.retryCount(), greaterThanOrEqualTo(3)); + assertThat(List.of(firstGenerationIndex, secondGenerationIndex).contains(errorInfo.indexName()), is(true)); + }); + // let's mark the index as writeable and make sure it's deleted and the error store is empty updateIndexSettings(Settings.builder().put(READ_ONLY.settingName(), false), firstGenerationIndex); @@ -521,6 +588,16 @@ public void testErrorRecordingOnRetention() throws Exception { assertThat(lifecycleService.getErrorStore().getError(firstGenerationIndex), nullValue()); } }); + + // health info for DSL should be EMPTY as everything's healthy + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS)); + }); } finally { // when the test executes successfully this will not be needed however, otherwise we need to make sure the index is // "delete-able" for test cleanup diff --git a/modules/data-streams/src/main/java/module-info.java b/modules/data-streams/src/main/java/module-info.java index 385c550d770e0..f21edca356ad9 100644 --- a/modules/data-streams/src/main/java/module-info.java +++ b/modules/data-streams/src/main/java/module-info.java 
@@ -16,4 +16,6 @@ exports org.elasticsearch.datastreams.action to org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle.action to org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle; + + provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.datastreams.DataStreamFeatures; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java new file mode 100644 index 0000000000000..53b3ca3353bab --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; + +import java.util.Set; + +/** + * Provides the features for data streams that this version of the code supports + */ +public class DataStreamFeatures implements FeatureSpecification { + + @Override + public Set getFeatures() { + return Set.of( + DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE // Added in 8.12 + ); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index dd8e13cf18408..9ac3a1afed5a5 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -47,6 +47,7 @@ import org.elasticsearch.datastreams.lifecycle.action.TransportGetDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.TransportGetDataStreamLifecycleStatsAction; import org.elasticsearch.datastreams.lifecycle.action.TransportPutDataStreamLifecycleAction; +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.datastreams.lifecycle.rest.RestDataStreamLifecycleStatsAction; import org.elasticsearch.datastreams.lifecycle.rest.RestDeleteDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.rest.RestExplainDataStreamLifecycleAction; @@ -110,7 +111,7 @@ public class DataStreamsPlugin extends Plugin implements ActionPlugin { private final SetOnce errorStoreInitialisationService = new SetOnce<>(); private final SetOnce dataLifecycleInitialisationService = new SetOnce<>(); - + private final SetOnce dataStreamLifecycleErrorsPublisher = new SetOnce<>(); 
private final Settings settings; public DataStreamsPlugin(Settings settings) { @@ -160,6 +161,15 @@ public Collection createComponents(PluginServices services) { this.updateTimeSeriesRangeService.set(updateTimeSeriesRangeService); components.add(this.updateTimeSeriesRangeService.get()); errorStoreInitialisationService.set(new DataStreamLifecycleErrorStore(services.threadPool()::absoluteTimeInMillis)); + dataStreamLifecycleErrorsPublisher.set( + new DataStreamLifecycleHealthInfoPublisher( + settings, + services.client(), + services.clusterService(), + errorStoreInitialisationService.get(), + services.featureService() + ) + ); dataLifecycleInitialisationService.set( new DataStreamLifecycleService( settings, @@ -169,12 +179,14 @@ public Collection createComponents(PluginServices services) { services.threadPool(), services.threadPool()::absoluteTimeInMillis, errorStoreInitialisationService.get(), - services.allocationService() + services.allocationService(), + dataStreamLifecycleErrorsPublisher.get() ) ); dataLifecycleInitialisationService.get().init(); components.add(errorStoreInitialisationService.get()); components.add(dataLifecycleInitialisationService.get()); + components.add(dataStreamLifecycleErrorsPublisher.get()); return components; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java index 01ccbdbe3ffec..a1f0c639f51f1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java @@ -12,11 +12,16 @@ import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.health.node.DslErrorInfo; +import 
java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.LongSupplier; +import java.util.function.Predicate; +import java.util.stream.Collectors; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; @@ -90,4 +95,29 @@ public ErrorEntry getError(String indexName) { public Set getAllIndices() { return Set.copyOf(indexNameToError.keySet()); } + + /** + * Retrieve the error entries in the error store that satisfy the provided predicate. + * This will return the error entries information (a subset of all the fields an {@link ErrorEntry} holds) sorted by the number of + * retries DSL attempted (descending order) and the number of entries will be limited according to the provided limit parameter. + * Returns empty list if no entries are present in the error store or none satisfy the predicate. + */ + public List getErrorsInfo(Predicate errorEntryPredicate, int limit) { + if (indexNameToError.isEmpty()) { + return List.of(); + } + return indexNameToError.entrySet() + .stream() + .filter(keyValue -> errorEntryPredicate.test(keyValue.getValue())) + .sorted(Map.Entry.comparingByValue()) + .limit(limit) + .map( + keyValue -> new DslErrorInfo( + keyValue.getKey(), + keyValue.getValue().firstOccurrenceTimestamp(), + keyValue.getValue().retryCount() + ) + ) + .collect(Collectors.toList()); + } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 9f9a90704167d..4d2c2af2266b1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -62,6 +62,7 @@ import org.elasticsearch.core.Tuple; import 
org.elasticsearch.datastreams.lifecycle.downsampling.DeleteSourceAndAddDownsampleIndexExecutor; import org.elasticsearch.datastreams.lifecycle.downsampling.DeleteSourceAndAddDownsampleToDS; +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; @@ -158,6 +159,7 @@ public class DataStreamLifecycleService implements ClusterStateListener, Closeab private final ThreadPool threadPool; final ResultDeduplicator transportActionsDeduplicator; final ResultDeduplicator clusterStateChangesDeduplicator; + private final DataStreamLifecycleHealthInfoPublisher dslHealthInfoPublisher; private LongSupplier nowSupplier; private final Clock clock; private final DataStreamLifecycleErrorStore errorStore; @@ -204,7 +206,8 @@ public DataStreamLifecycleService( ThreadPool threadPool, LongSupplier nowSupplier, DataStreamLifecycleErrorStore errorStore, - AllocationService allocationService + AllocationService allocationService, + DataStreamLifecycleHealthInfoPublisher dataStreamLifecycleHealthInfoPublisher ) { this.settings = settings; this.client = client; @@ -232,6 +235,7 @@ public DataStreamLifecycleService( Priority.URGENT, // urgent priority as this deletes indices new DeleteSourceAndAddDownsampleIndexExecutor(allocationService) ); + this.dslHealthInfoPublisher = dataStreamLifecycleHealthInfoPublisher; } /** @@ -296,6 +300,25 @@ public void triggered(SchedulerEngine.Event event) { event.getTriggeredTime() ); run(clusterService.state()); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + assert acknowledgedResponse.isAcknowledged() : "updating the health info is always acknowledged"; + } + + @Override + public void onFailure(Exception e) { + logger.debug( + String.format( + Locale.ROOT, + "unable to update the health 
cache with DSL errors related information " + + "due to [%s]. Will retry on the next DSL run", + e.getMessage() + ), + e + ); + } + }); } } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java new file mode 100644 index 0000000000000..12abbe125cffb --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.datastreams.lifecycle.health; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; +import org.elasticsearch.health.node.DslErrorInfo; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; +import org.elasticsearch.health.node.selection.HealthNode; + +import java.util.List; + +import static 
org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING; + +/** + * Provides the infrastructure to send errors encountered by indices managed by data stream lifecycle service to the health node. + */ +public class DataStreamLifecycleHealthInfoPublisher { + private static final Logger logger = LogManager.getLogger(DataStreamLifecycleHealthInfoPublisher.class); + /** + * Controls the number of DSL error entries we publish to the health node. + */ + public static final Setting DATA_STREAM_LIFECYCLE_MAX_ERRORS_TO_PUBLISH_SETTING = Setting.intSetting( + "data_streams.lifecycle.max_errors_to_publish", + 500, + 0, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + public static final NodeFeature DSL_HEALTH_INFO_FEATURE = new NodeFeature("health.dsl.info"); + + private final Client client; + private final ClusterService clusterService; + private final DataStreamLifecycleErrorStore errorStore; + private final FeatureService featureService; + private volatile int signallingErrorRetryInterval; + private volatile int maxNumberOfErrorsToPublish; + + public DataStreamLifecycleHealthInfoPublisher( + Settings settings, + Client client, + ClusterService clusterService, + DataStreamLifecycleErrorStore errorStore, + FeatureService featureService + ) { + this.client = client; + this.clusterService = clusterService; + this.errorStore = errorStore; + this.featureService = featureService; + this.signallingErrorRetryInterval = DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING.get(settings); + this.maxNumberOfErrorsToPublish = DATA_STREAM_LIFECYCLE_MAX_ERRORS_TO_PUBLISH_SETTING.get(settings); + } + + public void init() { + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING, this::updateSignallingRetryThreshold); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(DATA_STREAM_LIFECYCLE_MAX_ERRORS_TO_PUBLISH_SETTING, 
this::updateNumberOfErrorsToPublish); + } + + private void updateSignallingRetryThreshold(int newValue) { + this.signallingErrorRetryInterval = newValue; + } + + private void updateNumberOfErrorsToPublish(int newValue) { + this.maxNumberOfErrorsToPublish = newValue; + } + + /** + * Publishes the DSL errors that have passed the signaling threshold (as defined by + * {@link org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService#DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING} + */ + public void publishDslErrorEntries(ActionListener actionListener) { + if (featureService.clusterHasFeature(clusterService.state(), DSL_HEALTH_INFO_FEATURE) == false) { + return; + } + // fetching the entries that persist in the error store for more than the signalling retry interval + // note that we're reporting this view into the error store on every publishing iteration + List errorEntriesToSignal = errorStore.getErrorsInfo( + entry -> entry.retryCount() >= signallingErrorRetryInterval, + maxNumberOfErrorsToPublish + ); + DiscoveryNode currentHealthNode = HealthNode.findHealthNode(clusterService.state()); + if (currentHealthNode != null) { + String healthNodeId = currentHealthNode.getId(); + logger.trace("reporting [{}] DSL error entries to to health node [{}]", errorEntriesToSignal.size(), healthNodeId); + client.execute( + UpdateHealthInfoCacheAction.INSTANCE, + new UpdateHealthInfoCacheAction.Request( + healthNodeId, + new DataStreamLifecycleHealthInfo(errorEntriesToSignal, errorStore.getAllIndices().size()) + ), + actionListener + ); + } else { + logger.trace("unable to report DSL health because there is no health node in the cluster. 
will retry on the next DSL run"); + } + } +} diff --git a/modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification new file mode 100644 index 0000000000000..3f1579eac4f85 --- /dev/null +++ b/modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -0,0 +1,9 @@ +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the Server Side Public License, v 1; you may not use this file except +# in compliance with, at your election, the Elastic License 2.0 or the Server +# Side Public License, v 1. +# + +org.elasticsearch.datastreams.DataStreamFeatures diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java index 9f1928374eb5f..41e0f6578cb7d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java @@ -9,10 +9,13 @@ package org.elasticsearch.datastreams.lifecycle; import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; +import org.elasticsearch.health.node.DslErrorInfo; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.List; import java.util.Set; +import java.util.stream.IntStream; import java.util.stream.Stream; import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore.MAX_ERROR_MESSAGE_LENGTH; @@ -84,4 +87,32 @@ public void testRecordedErrorIsMaxOneThousandChars() { assertThat(errorStore.getError("test"), 
is(notNullValue())); assertThat(errorStore.getError("test").error().length(), is(MAX_ERROR_MESSAGE_LENGTH)); } + + public void testGetFilteredEntries() { + IntStream.range(0, 20).forEach(i -> errorStore.recordError("test20", new NullPointerException("testing"))); + IntStream.range(0, 5).forEach(i -> errorStore.recordError("test5", new NullPointerException("testing"))); + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 7, 100); + assertThat(entries.size(), is(1)); + assertThat(entries.get(0).indexName(), is("test20")); + } + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 7, 0); + assertThat(entries.size(), is(0)); + } + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 50, 100); + assertThat(entries.size(), is(0)); + } + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 2, 100); + assertThat(entries.size(), is(2)); + assertThat(entries.get(0).indexName(), is("test20")); + assertThat(entries.get(1).indexName(), is("test5")); + } + } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index 2445e6b0d72df..befa16573de23 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -62,6 +62,9 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.datastreams.DataStreamFeatures; +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.Index; import 
org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; @@ -159,6 +162,7 @@ public void setupServices() { EmptySnapshotsInfoService.INSTANCE, TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY ); + DataStreamLifecycleErrorStore errorStore = new DataStreamLifecycleErrorStore(() -> now); dataStreamLifecycleService = new DataStreamLifecycleService( Settings.EMPTY, client, @@ -166,8 +170,15 @@ public void setupServices() { clock, threadPool, () -> now, - new DataStreamLifecycleErrorStore(() -> now), - allocationService + errorStore, + allocationService, + new DataStreamLifecycleHealthInfoPublisher( + Settings.EMPTY, + client, + clusterService, + errorStore, + new FeatureService(List.of(new DataStreamFeatures())) + ) ); clientDelegate = null; dataStreamLifecycleService.init(); @@ -1383,6 +1394,7 @@ public void testTimeSeriesIndicesStillWithinTimeBounds() { public void testTrackingTimeStats() { AtomicLong now = new AtomicLong(0); long delta = randomLongBetween(10, 10000); + DataStreamLifecycleErrorStore errorStore = new DataStreamLifecycleErrorStore(() -> Clock.systemUTC().millis()); DataStreamLifecycleService service = new DataStreamLifecycleService( Settings.EMPTY, getTransportRequestsRecordingClient(), @@ -1390,8 +1402,15 @@ public void testTrackingTimeStats() { Clock.systemUTC(), threadPool, () -> now.getAndAdd(delta), - new DataStreamLifecycleErrorStore(() -> Clock.systemUTC().millis()), - mock(AllocationService.class) + errorStore, + mock(AllocationService.class), + new DataStreamLifecycleHealthInfoPublisher( + Settings.EMPTY, + getTransportRequestsRecordingClient(), + clusterService, + errorStore, + new FeatureService(List.of(new DataStreamFeatures())) + ) ); assertThat(service.getLastRunDuration(), is(nullValue())); assertThat(service.getTimeBetweenStarts(), is(nullValue())); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java index 0c10e3964e168..829fe454f7463 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java @@ -195,8 +195,9 @@ public void testToXContent() throws IOException { } { // Make sure generation_date is not present if it is null (which it is for a write index): + String index = randomAlphaOfLengthBetween(10, 30); ExplainIndexDataStreamLifecycle explainIndexWithNullGenerationDate = new ExplainIndexDataStreamLifecycle( - randomAlphaOfLengthBetween(10, 30), + index, true, now, randomBoolean() ? now + TimeValue.timeValueDays(1).getMillis() : null, @@ -252,8 +253,9 @@ private static ExplainIndexDataStreamLifecycle createRandomIndexDataStreamLifecy long now, @Nullable DataStreamLifecycle lifecycle ) { + String index = randomAlphaOfLengthBetween(10, 30); return new ExplainIndexDataStreamLifecycle( - randomAlphaOfLengthBetween(10, 30), + index, true, now, randomBoolean() ? now + TimeValue.timeValueDays(1).getMillis() : null, diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java new file mode 100644 index 0000000000000..31393a3fc18ed --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.datastreams.lifecycle.health; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.datastreams.DataStreamFeatures; +import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; +import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; +import org.elasticsearch.health.node.DslErrorInfo; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; +import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.threadpool.TestThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import static 
org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_MERGE_POLICY_TARGET_FACTOR_SETTING; +import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_MERGE_POLICY_TARGET_FLOOR_SEGMENT_SETTING; +import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class DataStreamLifecycleHealthInfoPublisherTests extends ESTestCase { + + private long now; + private ClusterService clusterService; + private TestThreadPool threadPool; + private CopyOnWriteArrayList clientSeenRequests; + private DataStreamLifecycleHealthInfoPublisher dslHealthInfoPublisher; + private final DiscoveryNode node1 = DiscoveryNodeUtils.builder("node_1") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE)) + .build(); + private final DiscoveryNode node2 = DiscoveryNodeUtils.builder("node_2") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE)) + .build(); + private final DiscoveryNode[] allNodes = new DiscoveryNode[] { node1, node2 }; + private DataStreamLifecycleErrorStore errorStore; + + @Before + public void setupServices() { + threadPool = new TestThreadPool(getTestName()); + Set> builtInClusterSettings = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + builtInClusterSettings.add(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL_SETTING); + builtInClusterSettings.add(DATA_STREAM_MERGE_POLICY_TARGET_FLOOR_SEGMENT_SETTING); + builtInClusterSettings.add(DATA_STREAM_MERGE_POLICY_TARGET_FACTOR_SETTING); + builtInClusterSettings.add(DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, builtInClusterSettings); + clusterService = 
createClusterService(threadPool, clusterSettings); + + now = System.currentTimeMillis(); + clientSeenRequests = new CopyOnWriteArrayList<>(); + + final Client client = getTransportRequestsRecordingClient(); + errorStore = new DataStreamLifecycleErrorStore(() -> now); + dslHealthInfoPublisher = new DataStreamLifecycleHealthInfoPublisher( + Settings.EMPTY, + client, + clusterService, + errorStore, + new FeatureService(List.of(new DataStreamFeatures())) + ); + } + + @After + public void cleanup() { + clientSeenRequests.clear(); + clusterService.close(); + threadPool.shutdownNow(); + } + + public void testPublishDslErrorEntries() { + for (int i = 0; i < 11; i++) { + errorStore.recordError("testIndexOverSignalThreshold", new NullPointerException("ouch")); + } + errorStore.recordError("testIndex", new IllegalStateException("bad state")); + ClusterState stateWithHealthNode = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); + stateWithHealthNode = ClusterState.builder(stateWithHealthNode) + .nodeFeatures( + Map.of( + node1.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), + node2.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) + ) + ) + .build(); + ClusterServiceUtils.setState(clusterService, stateWithHealthNode); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) {} + + @Override + public void onFailure(Exception e) { + + } + }); + + assertThat(clientSeenRequests.size(), is(1)); + DataStreamLifecycleHealthInfo dslHealthInfo = clientSeenRequests.get(0).getDslHealthInfo(); + assertThat(dslHealthInfo, is(notNullValue())); + List dslErrorsInfo = dslHealthInfo.dslErrorsInfo(); + assertThat(dslErrorsInfo.size(), is(1)); + assertThat(dslErrorsInfo.get(0).indexName(), is("testIndexOverSignalThreshold")); + assertThat(dslHealthInfo.totalErrorEntriesCount(), is(2)); + } + + public void 
testPublishDslErrorEntriesNoHealthNode() { + // no requests are being executed + for (int i = 0; i < 11; i++) { + errorStore.recordError("testIndexOverSignalThreshold", new NullPointerException("ouch")); + } + errorStore.recordError("testIndex", new IllegalStateException("bad state")); + + ClusterState stateNoHealthNode = ClusterStateCreationUtils.state(node1, node1, null, allNodes); + stateNoHealthNode = ClusterState.builder(stateNoHealthNode) + .nodeFeatures( + Map.of( + node1.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), + node2.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) + ) + ) + .build(); + ClusterServiceUtils.setState(clusterService, stateNoHealthNode); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) {} + + @Override + public void onFailure(Exception e) { + + } + }); + + assertThat(clientSeenRequests.size(), is(0)); + } + + public void testPublishDslErrorEntriesEmptyErrorStore() { + // publishes the empty error store (this is the "back to healthy" state where all errors have been fixed) + ClusterState state = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); + state = ClusterState.builder(state) + .nodeFeatures( + Map.of( + node1.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), + node2.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) + ) + ) + .build(); + ClusterServiceUtils.setState(clusterService, state); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) {} + + @Override + public void onFailure(Exception e) { + + } + }); + + assertThat(clientSeenRequests.size(), is(1)); + DataStreamLifecycleHealthInfo dslHealthInfo = clientSeenRequests.get(0).getDslHealthInfo(); + 
assertThat(dslHealthInfo, is(notNullValue())); + List dslErrorsInfo = dslHealthInfo.dslErrorsInfo(); + assertThat(dslErrorsInfo.size(), is(0)); + assertThat(dslHealthInfo.totalErrorEntriesCount(), is(0)); + } + + private Client getTransportRequestsRecordingClient() { + return new NoOpClient(threadPool) { + @Override + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + clientSeenRequests.add((UpdateHealthInfoCacheAction.Request) request); + } + }; + } + +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java index b862d0b2f20b6..92194a94ab44c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.test.ESIntegTestCase; @@ -132,7 +133,10 @@ private void assertHealthDuring(Matcher statusMatcher, Runnable ac @Override public void clusterChanged(ClusterChangedEvent event) { states.add( - new RoutingNodesAndHealth(event.state().getRoutingNodes(), service.calculate(false, 1, new HealthInfo(Map.of()))) + new RoutingNodesAndHealth( + event.state().getRoutingNodes(), + service.calculate(false, 1, new HealthInfo(Map.of(), DataStreamLifecycleHealthInfo.NO_DSL_ERRORS)) + ) ); } }; diff --git 
a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index ca79be9453cfe..7c3568986dccd 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -186,6 +186,8 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_HF_SERVICE_ADDED = def(8_553_00_0); public static final TransportVersion INFERENCE_USAGE_ADDED = def(8_554_00_0); public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_555_00_0); + public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java index 79c59314d7425..62406ccd4f853 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java @@ -25,7 +25,8 @@ public record ErrorEntry(long firstOccurrenceTimestamp, String error, long recordedTimestamp, int retryCount) implements Writeable, - ToXContentObject { + ToXContentObject, + Comparable { public ErrorEntry(StreamInput in) throws IOException { this(in.readLong(), in.readString(), in.readLong(), in.readInt()); @@ -77,4 +78,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(recordedTimestamp); out.writeInt(retryCount); } + + /** + * Compares two error entries by the number of retries, in reversed order by default. 
+ */ + @Override + public int compareTo(ErrorEntry o) { + return Integer.compare(o.retryCount, retryCount); + } } diff --git a/server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java new file mode 100644 index 0000000000000..c94ca87104718 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.List; + +/** + * Represents the data stream lifecycle information that would help shape the functionality's health. 
+ */ +public record DataStreamLifecycleHealthInfo(List dslErrorsInfo, int totalErrorEntriesCount) implements Writeable { + + public static final DataStreamLifecycleHealthInfo NO_DSL_ERRORS = new DataStreamLifecycleHealthInfo(List.of(), 0); + + public DataStreamLifecycleHealthInfo(StreamInput in) throws IOException { + this(in.readCollectionAsList(DslErrorInfo::new), in.readVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(dslErrorsInfo); + out.writeVInt(totalErrorEntriesCount); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java b/server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java new file mode 100644 index 0000000000000..7c037ffe085d8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +/** + * Represents a reduced view into an {@link org.elasticsearch.action.datastreams.lifecycle.ErrorEntry}, removing the + * exception message and last occurrence timestamp as we could potentially send thousands of entries over the wire + * and the omitted fields would not be used. 
+ */ +public record DslErrorInfo(String indexName, long firstOccurrence, int retryCount) implements Writeable { + + public DslErrorInfo(StreamInput in) throws IOException { + this(in.readString(), in.readLong(), in.readVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(indexName); + out.writeLong(firstOccurrence); + out.writeVInt(retryCount); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java index e8e9dd9747a9f..0bb8027f8299d 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java @@ -8,26 +8,42 @@ package org.elasticsearch.health.node; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import java.io.IOException; import java.util.Map; +import static org.elasticsearch.health.node.DataStreamLifecycleHealthInfo.NO_DSL_ERRORS; + /** * This class wraps all the data returned by the health node. 
* @param diskInfoByNode A Map of node id to DiskHealthInfo for that node + * @param dslHealthInfo The data stream lifecycle health information */ -public record HealthInfo(Map diskInfoByNode) implements Writeable { - public static final HealthInfo EMPTY_HEALTH_INFO = new HealthInfo(Map.of()); +public record HealthInfo(Map diskInfoByNode, @Nullable DataStreamLifecycleHealthInfo dslHealthInfo) + implements + Writeable { + + public static final HealthInfo EMPTY_HEALTH_INFO = new HealthInfo(Map.of(), NO_DSL_ERRORS); public HealthInfo(StreamInput input) throws IOException { - this(input.readMap(DiskHealthInfo::new)); + this( + input.readMap(DiskHealthInfo::new), + input.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS) + ? input.readOptionalWriteable(DataStreamLifecycleHealthInfo::new) + : null + ); } @Override public void writeTo(StreamOutput output) throws IOException { output.writeMap(diskInfoByNode, StreamOutput::writeWriteable); + if (output.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS)) { + output.writeOptionalWriteable(dslHealthInfo); + } } } diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java index bb295f6401941..986b5e13dce6e 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.Nullable; import org.elasticsearch.health.node.selection.HealthNode; import java.util.Map; @@ -26,6 +27,8 @@ public class HealthInfoCache implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(HealthInfoCache.class); private volatile 
ConcurrentHashMap diskInfoByNode = new ConcurrentHashMap<>(); + @Nullable + private volatile DataStreamLifecycleHealthInfo dslHealthInfo = null; private HealthInfoCache() {} @@ -35,8 +38,17 @@ public static HealthInfoCache create(ClusterService clusterService) { return healthInfoCache; } - public void updateNodeHealth(String nodeId, DiskHealthInfo diskHealthInfo) { - diskInfoByNode.put(nodeId, diskHealthInfo); + public void updateNodeHealth( + String nodeId, + @Nullable DiskHealthInfo diskHealthInfo, + @Nullable DataStreamLifecycleHealthInfo latestDslHealthInfo + ) { + if (diskHealthInfo != null) { + diskInfoByNode.put(nodeId, diskHealthInfo); + } + if (latestDslHealthInfo != null) { + dslHealthInfo = latestDslHealthInfo; + } } @Override @@ -56,6 +68,7 @@ public void clusterChanged(ClusterChangedEvent event) { } else if (diskInfoByNode.isEmpty() == false) { logger.debug("Node [{}][{}] is no longer the health node, emptying the cache.", localNode.getName(), localNode.getId()); diskInfoByNode = new ConcurrentHashMap<>(); + dslHealthInfo = null; } } @@ -65,6 +78,6 @@ public void clusterChanged(ClusterChangedEvent event) { */ public HealthInfo getHealthInfo() { // A shallow copy is enough because the inner data is immutable. 
- return new HealthInfo(Map.copyOf(diskInfoByNode)); + return new HealthInfo(Map.copyOf(diskInfoByNode), dslHealthInfo); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java index 1499c278a4209..d1961c597bc1e 100644 --- a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.health.node; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; @@ -18,6 +19,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.health.node.action.HealthNodeRequest; import org.elasticsearch.health.node.action.TransportHealthNodeAction; import org.elasticsearch.tasks.Task; @@ -36,17 +38,37 @@ public class UpdateHealthInfoCacheAction extends ActionType listener ) { - nodeHealthOverview.updateNodeHealth(request.getNodeId(), request.getDiskHealthInfo()); + nodeHealthOverview.updateNodeHealth(request.getNodeId(), request.getDiskHealthInfo(), request.getDslHealthInfo()); listener.onResponse(AcknowledgedResponse.of(true)); } } diff --git a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java index 7d7eb5c1a5697..8ca531b678c4a 100644 --- a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import 
org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; import org.elasticsearch.health.node.DiskHealthInfo; import org.elasticsearch.health.node.FetchHealthInfoCacheAction; import org.elasticsearch.health.node.HealthInfo; @@ -252,7 +253,7 @@ public void testThatIndicatorsGetHealthInfoData() throws Exception { randomAlphaOfLength(30), new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) ); - HealthInfo healthInfo = new HealthInfo(diskHealthInfoMap); + HealthInfo healthInfo = new HealthInfo(diskHealthInfoMap, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); var service = new HealthService( // The preflight indicator does not get data because the data is not fetched until after the preflight check diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index 6ec9acfb44c00..1584c4a57dd32 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -258,7 +258,7 @@ public void testRedNoBlockedIndicesAndRedAllRoleNodes() throws IOException { diskInfoByNode.put(discoveryNode.getId(), new DiskHealthInfo(HealthStatus.GREEN)); } } - HealthInfo healthInfo = new HealthInfo(diskInfoByNode); + HealthInfo healthInfo = new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(HealthStatus.RED)); @@ -1021,7 +1021,7 @@ private HealthInfo createHealthInfo(List healthInfoConfigs) { diskInfoByNode.put(node.getId(), diskHealthInfo); } } - return new HealthInfo(diskInfoByNode); + return new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); } private 
static ClusterService createClusterService(Collection nodes, boolean withBlockedIndex) { diff --git a/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java b/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java index 2200f59d3510b..f921c03686da4 100644 --- a/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java @@ -35,6 +35,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.health.node.HealthInfoTests.randomDslHealthInfo; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; @@ -101,7 +102,7 @@ public void testAction() throws ExecutionException, InterruptedException { setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, localNode, allNodes)); HealthInfoCache healthInfoCache = getTestHealthInfoCache(); final FetchHealthInfoCacheAction.Response expectedResponse = new FetchHealthInfoCacheAction.Response( - new HealthInfo(healthInfoCache.getHealthInfo().diskInfoByNode()) + new HealthInfo(healthInfoCache.getHealthInfo().diskInfoByNode(), healthInfoCache.getHealthInfo().dslHealthInfo()) ); ActionTestUtils.execute( new FetchHealthInfoCacheAction.TransportAction( @@ -126,7 +127,8 @@ private HealthInfoCache getTestHealthInfoCache() { String nodeId = allNode.getId(); healthInfoCache.updateNodeHealth( nodeId, - new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) + new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())), + randomDslHealthInfo() ); } return healthInfoCache; @@ -134,7 +136,7 @@ private HealthInfoCache getTestHealthInfoCache() { public void 
testResponseSerialization() { FetchHealthInfoCacheAction.Response response = new FetchHealthInfoCacheAction.Response( - new HealthInfo(getTestHealthInfoCache().getHealthInfo().diskInfoByNode()) + new HealthInfo(getTestHealthInfoCache().getHealthInfo().diskInfoByNode(), DataStreamLifecycleHealthInfo.NO_DSL_ERRORS) ); EqualsHashCodeTestUtils.checkEqualsAndHashCode( response, @@ -150,6 +152,11 @@ private FetchHealthInfoCacheAction.Response mutateResponse(FetchHealthInfoCacheA randomAlphaOfLength(10), new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) ); - return new FetchHealthInfoCacheAction.Response(new HealthInfo(diskHealthInfoMapCopy)); + return new FetchHealthInfoCacheAction.Response( + new HealthInfo( + diskHealthInfoMapCopy, + randomValueOtherThan(originalResponse.getHealthInfo().dslHealthInfo(), HealthInfoTests::randomDslHealthInfo) + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java b/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java index a278c9f17b330..fec3504d17218 100644 --- a/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java @@ -21,7 +21,9 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.health.node.HealthInfoTests.randomDslHealthInfo; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; @@ -43,20 +45,24 @@ public class HealthInfoCacheTests extends ESTestCase { public void testAddHealthInfo() { HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); - healthInfoCache.updateNodeHealth(node1.getId(), GREEN); - healthInfoCache.updateNodeHealth(node2.getId(), RED); + DataStreamLifecycleHealthInfo latestDslHealthInfo = randomDslHealthInfo(); + 
healthInfoCache.updateNodeHealth(node1.getId(), GREEN, latestDslHealthInfo); + healthInfoCache.updateNodeHealth(node2.getId(), RED, null); Map diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode(); - healthInfoCache.updateNodeHealth(node1.getId(), RED); + healthInfoCache.updateNodeHealth(node1.getId(), RED, null); assertThat(diskHealthInfo.get(node1.getId()), equalTo(GREEN)); assertThat(diskHealthInfo.get(node2.getId()), equalTo(RED)); + // dsl health info has not changed as a new value has not been reported + assertThat(healthInfoCache.getHealthInfo().dslHealthInfo(), is(latestDslHealthInfo)); } public void testRemoveNodeFromTheCluster() { HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); - healthInfoCache.updateNodeHealth(node1.getId(), GREEN); - healthInfoCache.updateNodeHealth(node2.getId(), RED); + healthInfoCache.updateNodeHealth(node1.getId(), GREEN, null); + DataStreamLifecycleHealthInfo latestDslHealthInfo = randomDslHealthInfo(); + healthInfoCache.updateNodeHealth(node2.getId(), RED, latestDslHealthInfo); ClusterState previous = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); ClusterState current = ClusterStateCreationUtils.state(node1, node1, node1, new DiscoveryNode[] { node1 }); @@ -65,12 +71,15 @@ public void testRemoveNodeFromTheCluster() { Map diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode(); assertThat(diskHealthInfo.get(node1.getId()), equalTo(GREEN)); assertThat(diskHealthInfo.get(node2.getId()), nullValue()); + // the dsl info is not removed when the node that reported it leaves the cluster as the next DSL run will report it and + // override it (if the health node stops being the designated health node the health cache nullifies the existing DSL info) + assertThat(healthInfoCache.getHealthInfo().dslHealthInfo(), is(latestDslHealthInfo)); } public void testNotAHealthNode() { HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); - 
healthInfoCache.updateNodeHealth(node1.getId(), GREEN); - healthInfoCache.updateNodeHealth(node2.getId(), RED); + healthInfoCache.updateNodeHealth(node1.getId(), GREEN, randomDslHealthInfo()); + healthInfoCache.updateNodeHealth(node2.getId(), RED, null); ClusterState previous = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); ClusterState current = ClusterStateCreationUtils.state(node1, node1, node2, allNodes); @@ -78,5 +87,6 @@ public void testNotAHealthNode() { Map diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode(); assertThat(diskHealthInfo.isEmpty(), equalTo(true)); + assertThat(healthInfoCache.getHealthInfo().dslHealthInfo(), is(nullValue())); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java b/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java index 371d19c9bb22d..c8ccda1c5b88d 100644 --- a/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java @@ -31,7 +31,7 @@ protected HealthInfo createTestInstance() { : new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())); diskInfoByNode.put(randomAlphaOfLengthBetween(10, 100), diskHealthInfo); } - return new HealthInfo(diskInfoByNode); + return new HealthInfo(diskInfoByNode, randomBoolean() ? 
randomDslHealthInfo() : null); } @Override @@ -67,6 +67,16 @@ public HealthInfo mutateInstance(HealthInfo originalHealthInfo) { default -> throw new IllegalStateException(); } } - return new HealthInfo(diskHealthInfoMapCopy); + return new HealthInfo( + diskHealthInfoMapCopy, + randomValueOtherThan(originalHealthInfo.dslHealthInfo(), HealthInfoTests::randomDslHealthInfo) + ); + } + + static DataStreamLifecycleHealthInfo randomDslHealthInfo() { + return new DataStreamLifecycleHealthInfo( + randomList(5, () -> new DslErrorInfo(randomAlphaOfLength(100), System.currentTimeMillis(), randomIntBetween(15, 500))), + randomIntBetween(6, 1000) + ); } } From 56170b4be737034d318b39ce89f29bd01e0e6c36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Tue, 5 Dec 2023 10:37:05 +0100 Subject: [PATCH 148/181] [Transform] Ensure transform `_schedule_now` API only triggers the expected transform task (#102958) --- docs/changelog/102958.yaml | 7 +++++++ .../transform/action/ScheduleNowTransformAction.java | 10 ++++++++++ .../action/ScheduleNowTransformActionRequestTests.java | 9 +++++++++ 3 files changed, 26 insertions(+) create mode 100644 docs/changelog/102958.yaml diff --git a/docs/changelog/102958.yaml b/docs/changelog/102958.yaml new file mode 100644 index 0000000000000..bb357c1eb09b5 --- /dev/null +++ b/docs/changelog/102958.yaml @@ -0,0 +1,7 @@ +pr: 102958 +summary: Ensure transform `_schedule_now` API only triggers the expected transform + task +area: Transform +type: bug +issues: + - 102956 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java index f48e06a3f743c..6a50bd40517e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.transform.TransformField; @@ -94,6 +95,15 @@ public boolean equals(Object obj) { // the base class does not implement equals, therefore we need to check timeout ourselves return this.id.equals(other.id) && getTimeout().equals(other.getTimeout()); } + + @Override + public boolean match(Task task) { + if (task.getDescription().startsWith(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX)) { + String taskId = task.getDescription().substring(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX.length()); + return taskId.equals(this.id); + } + return false; + } } public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java index e98e14e341cf7..80c415065e1bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.test.AbstractWireSerializingTestCase; import 
org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction.Request; @@ -55,4 +56,12 @@ public void testValidationFailure() { assertThat(e, is(notNullValue())); assertThat(e.validationErrors(), contains("_schedule_now API does not support _all wildcard")); } + + public void testMatch() { + Request request = new Request("my-transform-7", TimeValue.timeValueSeconds(5)); + assertTrue(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-7", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-77", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "my-transform-7", null, null))); + } } From 89faf4497e1afc71f5d58ce979c4b23efee91258 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 5 Dec 2023 10:47:39 +0100 Subject: [PATCH 149/181] [DOCS] Add ES|QL 'getting started' code snippets to CSV tests (#102653) * [DOCS] Add ES|QL 'getting started' code snippets to CSV tests * Change dots in columns names into underscores * Add LIMIT 0 to ENRICH test * Move code snippets out of docs.csv-spec * Replace code snippets by includes * Add missing semicolon --- docs/reference/esql/esql-get-started.asciidoc | 70 ++++++------------- .../xpack/esql/CsvTestsDataLoader.java | 11 ++- .../src/main/resources/clientips.csv | 6 ++ .../src/main/resources/date.csv-spec | 42 +++++++++++ .../src/main/resources/dissect.csv-spec | 27 +++++++ .../src/main/resources/docs.csv-spec | 66 ++++++++++++++++- .../resources/enrich-policy-clientips.json | 7 ++ ...ages.json => enrich-policy-languages.json} | 0 .../src/main/resources/enrich.csv-spec | 27 +++++++ .../src/main/resources/eval.csv-spec | 22 ++++++ .../src/main/resources/mapping-clientips.json | 10 +++ .../main/resources/mapping-sample_data.json | 16 +++++ .../src/main/resources/sample_data.csv | 8 +++ 
.../src/main/resources/stats.csv-spec | 32 +++++++++ .../src/main/resources/where-like.csv-spec | 11 +++ .../src/main/resources/where.csv-spec | 11 +++ 16 files changed, 313 insertions(+), 53 deletions(-) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{enricy-policy-languages.json => enrich-policy-languages.json} (100%) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index e54825406257f..4109d9d6f4ba3 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -39,7 +39,7 @@ This query returns up to 500 documents from the `sample_data` index: [source,esql] ---- -FROM sample_data +include::{esql-specs}/docs.csv-spec[tag=gs-from] ---- Each column corresponds to a field, and can be accessed by the name of that @@ -52,7 +52,7 @@ previous one: [source,esql] ---- -from sample_data +include::{esql-specs}/docs.csv-spec[tag=gs-from-lowercase] ---- ==== @@ -73,8 +73,7 @@ that are returned, up to a maximum of 10,000 rows: [source,esql] ---- -FROM sample_data -| LIMIT 3 +include::{esql-specs}/docs.csv-spec[tag=gs-limit] ---- [TIP] @@ -84,7 +83,7 @@ have to. 
The following query is identical to the previous one: [source,esql] ---- -FROM sample_data | LIMIT 3 +include::{esql-specs}/docs.csv-spec[tag=gs-limit-one-line] ---- ==== @@ -100,8 +99,7 @@ sort rows on one or more columns: [source,esql] ---- -FROM sample_data -| SORT @timestamp DESC +include::{esql-specs}/docs.csv-spec[tag=gs-sort] ---- [discrete] @@ -113,16 +111,14 @@ events with a duration longer than 5ms: [source,esql] ---- -FROM sample_data -| WHERE event.duration > 5000000 +include::{esql-specs}/where.csv-spec[tag=gs-where] ---- `WHERE` supports several <>. For example, you can use <> to run a wildcard query against the `message` column: [source,esql] ---- -FROM sample_data -| WHERE message LIKE "Connected*" +include::{esql-specs}/where-like.csv-spec[tag=gs-like] ---- [discrete] @@ -149,9 +145,7 @@ result set to 3 rows: [source,esql] ---- -FROM sample_data -| SORT @timestamp DESC -| LIMIT 3 +include::{esql-specs}/docs.csv-spec[tag=gs-chaining] ---- NOTE: The order of processing commands is important. First limiting the result @@ -169,8 +163,7 @@ other words: `event.duration` converted from nanoseconds to milliseconds. [source,esql] ---- -FROM sample_data -| EVAL duration_ms = event.duration / 1000000.0 +include::{esql-specs}/eval.csv-spec[tag=gs-eval] ---- `EVAL` supports several <>. 
For example, to round a @@ -179,8 +172,7 @@ number to the closest number with the specified number of digits, use the [source,esql] ---- -FROM sample_data -| EVAL duration_ms = ROUND(event.duration / 1000000.0, 1) +include::{esql-specs}/eval.csv-spec[tag=gs-round] ---- [discrete] @@ -193,16 +185,14 @@ example, the median duration: [source,esql] ---- -FROM sample_data -| STATS median_duration = MEDIAN(event.duration) +include::{esql-specs}/stats.csv-spec[tag=gs-stats] ---- You can calculate multiple stats with one command: [source,esql] ---- -FROM sample_data -| STATS median_duration = MEDIAN(event.duration), max_duration = MAX(event.duration) +include::{esql-specs}/stats.csv-spec[tag=gs-two-stats] ---- Use `BY` to group calculated stats by one or more columns. For example, to @@ -210,8 +200,7 @@ calculate the median duration per client IP: [source,esql] ---- -FROM sample_data -| STATS median_duration = MEDIAN(event.duration) BY client.ip +include::{esql-specs}/stats.csv-spec[tag=gs-stats-by] ---- [discrete] @@ -227,9 +216,7 @@ For example, to create hourly buckets for the data on October 23rd: [source,esql] ---- -FROM sample_data -| KEEP @timestamp -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket] ---- Combine `AUTO_BUCKET` with <> to create a histogram. 
For example, @@ -237,20 +224,14 @@ to count the number of events per hour: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, event.duration -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") -| STATS COUNT(*) BY bucket +include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by] ---- Or the median duration per hour: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, event.duration -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") -| STATS median_duration = MEDIAN(event.duration) BY bucket +include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by-median] ---- [discrete] @@ -273,10 +254,7 @@ command: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, client.ip, event.duration -| EVAL client.ip = TO_STRING(client.ip) -| ENRICH clientip_policy ON client.ip WITH env +include::{esql-specs}/enrich.csv-spec[tag=gs-enrich] ---- You can use the new `env` column that's added by the `ENRICH` command in @@ -285,11 +263,7 @@ environment: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, client.ip, event.duration -| EVAL client.ip = TO_STRING(client.ip) -| ENRICH clientip_policy ON client.ip WITH env -| STATS median_duration = MEDIAN(event.duration) BY env +include::{esql-specs}/enrich.csv-spec[tag=gs-enrich-stats-by] ---- For more about data enrichment with {esql}, refer to <>. 
@@ -321,8 +295,7 @@ string, you can use the following `DISSECT` command: [source,esql] ---- -FROM sample_data -| DISSECT message "Connected to %{server.ip}" +include::{esql-specs}/dissect.csv-spec[tag=gs-dissect] ---- This adds a `server.ip` column to those rows that have a `message` that matches @@ -334,10 +307,7 @@ has accepted: [source,esql] ---- -FROM sample_data -| WHERE STARTS_WITH(message, "Connected to") -| DISSECT message "Connected to %{server.ip}" -| STATS COUNT(*) BY server.ip +include::{esql-specs}/dissect.csv-spec[tag=gs-dissect-stats-by] ---- For more about data processing with {esql}, refer to diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index fd4600e5e64ff..3df70b3b83d37 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -52,6 +52,8 @@ public class CsvTestsDataLoader { private static final TestsDataset APPS = new TestsDataset("apps", "mapping-apps.json", "apps.csv"); private static final TestsDataset LANGUAGES = new TestsDataset("languages", "mapping-languages.json", "languages.csv"); private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs", "mapping-ul_logs.json", "ul_logs.csv"); + private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data", "mapping-sample_data.json", "sample_data.csv"); + private static final TestsDataset CLIENT_IPS = new TestsDataset("clientips", "mapping-clientips.json", "clientips.csv"); private static final TestsDataset AIRPORTS = new TestsDataset("airports", "mapping-airports.json", "airports.csv"); private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", "mapping-airports_web.json", "airports_web.csv"); @@ -66,15 +68,20 @@ 
public class CsvTestsDataLoader { LANGUAGES, UL_LOGS.indexName, UL_LOGS, + SAMPLE_DATA.indexName, + SAMPLE_DATA, + CLIENT_IPS.indexName, + CLIENT_IPS, AIRPORTS.indexName, AIRPORTS, AIRPORTS_WEB.indexName, AIRPORTS_WEB ); - private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enricy-policy-languages.json"); + private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); + private static final EnrichConfig CLIENT_IPS_ENRICH = new EnrichConfig("clientip_policy", "enrich-policy-clientips.json"); - public static final List ENRICH_POLICIES = List.of(LANGUAGES_ENRICH); + public static final List ENRICH_POLICIES = List.of(LANGUAGES_ENRICH, CLIENT_IPS_ENRICH); /** *

    diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv new file mode 100644 index 0000000000000..80918bc22d1fb --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv @@ -0,0 +1,6 @@ +client_ip:keyword,env:keyword +172.21.0.5,Development +172.21.2.113,QA +172.21.2.162,QA +172.21.3.15,Production +172.21.3.16,Production diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8b94c022aaf6a..f6c0666c54ed8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -725,3 +725,45 @@ birth_date:datetime 1952-02-27T00:00:00.000Z 1953-04-21T00:00:00.000Z ; + +docsGettingStartedAutoBucket +// tag::gs-auto_bucket[] +FROM sample_data +| KEEP @timestamp +| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +// end::gs-auto_bucket[] +| LIMIT 0 +; + +@timestamp:date | bucket:date +; + +docsGettingStartedAutoBucketStatsBy +// tag::gs-auto_bucket-stats-by[] +FROM sample_data +| KEEP @timestamp, event_duration +| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| STATS COUNT(*) BY bucket +// end::gs-auto_bucket-stats-by[] +| SORT bucket +; + +COUNT(*):long | bucket:date +2 |2023-10-23T12:00:00.000Z +5 |2023-10-23T13:00:00.000Z +; + +docsGettingStartedAutoBucketStatsByMedian +// tag::gs-auto_bucket-stats-by-median[] +FROM sample_data +| KEEP @timestamp, event_duration +| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| STATS median_duration = MEDIAN(event_duration) BY bucket +// end::gs-auto_bucket-stats-by-median[] +| SORT bucket +; + +median_duration:double | bucket:date +3107561.0 |2023-10-23T12:00:00.000Z +1756467.0 
|2023-10-23T13:00:00.000Z +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec index 4c9c3a2681f50..1133b24cd1cf3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec @@ -159,6 +159,33 @@ emp_no:integer | a:keyword | b:keyword | c:keyword 10006 | [Principal, Senior] | [Support, Team] | [Engineer, Lead] ; +docsGettingStartedDissect +// tag::gs-dissect[] +FROM sample_data +| DISSECT message "Connected to %{server_ip}" +// end::gs-dissect[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | server_ip:keyword +; + +docsGettingStartedDissectStatsBy +// tag::gs-dissect-stats-by[] +FROM sample_data +| WHERE STARTS_WITH(message, "Connected to") +| DISSECT message "Connected to %{server_ip}" +| STATS COUNT(*) BY server_ip +// end::gs-dissect-stats-by[] +| SORT server_ip +; + +COUNT(*):long | server_ip:keyword +1 |10.1.0.1 +1 |10.1.0.2 +1 |10.1.0.3 +; + emptyPattern#[skip:-8.11.99] ROW a="b c d"| DISSECT a "%{b} %{} %{d}"; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index bbbfa287ea695..a754194739992 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -650,4 +650,68 @@ FROM employees first_name:keyword | last_name:keyword Alejandro |McAlpine // end::rlike-result[] -; \ No newline at end of file +; + +docsGettingStartedFrom +// tag::gs-from[] +FROM sample_data +// end::gs-from[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedFromLowercase +// tag::gs-from-lowercase[] +from sample_data +// end::gs-from-lowercase[] +| LIMIT 0 +; + +@timestamp:date | 
client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedLimit +// tag::gs-limit[] +FROM sample_data +| LIMIT 3 +// end::gs-limit[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedLimitOneLine +// tag::gs-limit-one-line[] +FROM sample_data | LIMIT 3 +// end::gs-limit-one-line[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedSort +// tag::gs-sort[] +FROM sample_data +| SORT @timestamp DESC +// end::gs-sort[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedChaining +// tag::gs-chaining[] +FROM sample_data +| SORT @timestamp DESC +| LIMIT 3 +// end::gs-chaining[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json new file mode 100644 index 0000000000000..2ca29a39c284d --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json @@ -0,0 +1,7 @@ +{ + "match": { + "indices": "clientips", + "match_field": "client_ip", + "enrich_fields": ["env"] + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enricy-policy-languages.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-languages.json similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/enricy-policy-languages.json rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-languages.json diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 796a7bceca55d..f5847260bbb16 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec 
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -3,3 +3,30 @@ from employees | eval x = 1, y = to_string(languages) | enrich languages_policy emp_no:integer | language_name:keyword ; + +docsGettingStartedEnrich +// tag::gs-enrich[] +FROM sample_data +| KEEP @timestamp, client_ip, event_duration +| EVAL client_ip = TO_STRING(client_ip) +| ENRICH clientip_policy ON client_ip WITH env +// end::gs-enrich[] +| LIMIT 0 +; + +@timestamp:date | event_duration:long | client_ip:keyword | env:keyword +; + +docsGettingStartedEnrichStatsBy +// tag::gs-enrich-stats-by[] +FROM sample_data +| KEEP @timestamp, client_ip, event_duration +| EVAL client_ip = TO_STRING(client_ip) +| ENRICH clientip_policy ON client_ip WITH env +| STATS median_duration = MEDIAN(event_duration) BY env +// end::gs-enrich-stats-by[] +| LIMIT 0 +; + +median_duration:double | env:keyword +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index b29c8024950f9..7a5a90fb398eb 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -215,3 +215,25 @@ emp_no:integer | foldable:keyword | folded_mv:keyword 10001 | "foo,bar" | [foo, bar] 10002 | "foo,bar" | [foo, bar] ; + +docsGettingStartedEval +// tag::gs-eval[] +FROM sample_data +| EVAL duration_ms = event_duration / 1000000.0 +// end::gs-eval[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | duration_ms:double +; + +docsGettingStartedRound +// tag::gs-round[] +FROM sample_data +| EVAL duration_ms = ROUND(event_duration / 1000000.0, 1) +// end::gs-round[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | duration_ms:double +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json new file mode 100644 index 0000000000000..39bd37ce26c7f --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json @@ -0,0 +1,10 @@ +{ + "properties": { + "client_ip": { + "type": "keyword" + }, + "env": { + "type": "keyword" + } + } + } \ No newline at end of file diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json new file mode 100644 index 0000000000000..838a8ba09b45a --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json @@ -0,0 +1,16 @@ +{ + "properties": { + "@timestamp": { + "type": "date" + }, + "client_ip": { + "type": "ip" + }, + "event_duration": { + "type": "long" + }, + "message": { + "type": "keyword" + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv new file mode 100644 index 0000000000000..3a62394014102 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv @@ -0,0 +1,8 @@ +@timestamp:date,client_ip:ip,event_duration:long,message:keyword +2023-10-23T13:55:01.543Z,172.21.3.15,1756467,Connected to 10.1.0.1 +2023-10-23T13:53:55.832Z,172.21.3.15,5033755,Connection error +2023-10-23T13:52:55.015Z,172.21.3.15,8268153,Connection error +2023-10-23T13:51:54.732Z,172.21.3.15,725448,Connection error +2023-10-23T13:33:34.937Z,172.21.0.5,1232382,Disconnected +2023-10-23T12:27:28.948Z,172.21.2.113,2764889,Connected to 10.1.0.2 +2023-10-23T12:15:03.360Z,172.21.2.162,3450233,Connected to 10.1.0.3 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 6050dba6acf3b..dc96d1736858c 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -750,3 +750,35 @@ c:long | a:long 1 | 1 ; +docsGettingStartedStats +// tag::gs-stats[] +FROM sample_data +| STATS median_duration = MEDIAN(event_duration) +// end::gs-stats[] +; + +median_duration:double +2764889.0 +; + +docsGettingStartedTwoStats +// tag::gs-two-stats[] +FROM sample_data +| STATS median_duration = MEDIAN(event_duration), max_duration = MAX(event_duration) +// end::gs-two-stats[] +; + +median_duration:double | max_duration:long +2764889.0 |8268153 +; + +docsGettingStartedStatsBy +// tag::gs-stats-by[] +FROM sample_data +| STATS median_duration = MEDIAN(event_duration) BY client_ip +// end::gs-stats-by[] +| LIMIT 0 +; + +median_duration:double | client_ip:ip +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec index 49bf62bf77db7..37a1978524e7f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec @@ -287,3 +287,14 @@ row x = "C:\\foo\\bar.exe" | mv_expand x | where x LIKE "C:\\\\\\\\*"; x:keyword ; + +docsGettingStartedLike +// tag::gs-like[] +FROM sample_data +| WHERE message LIKE "Connected*" +// end::gs-like[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec index 89f329bc6dcb9..4a76f6bca0310 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec @@ -76,3 +76,14 @@ emp_no:integer | first_name:keyword 10010 |Duangkaew 10011 |Mary ; + +docsGettingStartedWhere +// tag::gs-where[] +FROM 
sample_data +| WHERE event_duration > 5000000 +// end::gs-where[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; From 50d6552e3814cb592177c4b7c3e30cfffc52d5c3 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 5 Dec 2023 12:01:48 +0200 Subject: [PATCH 150/181] Error log when license verification fails locally (#102919) This change implements logging if the license verification fails on local nodes (after the the license has already been (erroneously) published in the cluster state). --- docs/changelog/102919.yaml | 5 ++ .../license/ClusterStateLicenseService.java | 88 +++++++++---------- .../license/LicenseVerifier.java | 24 ++++- 3 files changed, 70 insertions(+), 47 deletions(-) create mode 100644 docs/changelog/102919.yaml diff --git a/docs/changelog/102919.yaml b/docs/changelog/102919.yaml new file mode 100644 index 0000000000000..0de2e75abc6cf --- /dev/null +++ b/docs/changelog/102919.yaml @@ -0,0 +1,5 @@ +pr: 102919 +summary: Error log when license verification fails locally +area: License +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java index d4d62a75e98c7..d65fda90b87e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java @@ -413,14 +413,6 @@ public void clusterChanged(ClusterChangedEvent event) { final ClusterState previousClusterState = event.previousState(); final ClusterState currentClusterState = event.state(); if (currentClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { - if (XPackPlugin.isReadyForXPackCustomMetadata(currentClusterState) == false) { - logger.debug( - "cannot add license to cluster as the following nodes might not understand the 
license metadata: {}", - () -> XPackPlugin.nodesNotReadyForXPackCustomMetadata(currentClusterState) - ); - return; - } - final LicensesMetadata prevLicensesMetadata = previousClusterState.getMetadata().custom(LicensesMetadata.TYPE); final LicensesMetadata currentLicensesMetadata = currentClusterState.getMetadata().custom(LicensesMetadata.TYPE); // notify all interested plugins @@ -439,26 +431,7 @@ public void clusterChanged(ClusterChangedEvent event) { } else { logger.trace("license unchanged [{}]", currentLicensesMetadata); } - - License currentLicense = null; - boolean noLicenseInPrevMetadata = prevLicensesMetadata == null || prevLicensesMetadata.getLicense() == null; - if (noLicenseInPrevMetadata == false) { - currentLicense = prevLicensesMetadata.getLicense(); - } - boolean noLicenseInCurrentMetadata = (currentLicensesMetadata == null || currentLicensesMetadata.getLicense() == null); - if (noLicenseInCurrentMetadata == false) { - currentLicense = currentLicensesMetadata.getLicense(); - } - - boolean noLicense = noLicenseInPrevMetadata && noLicenseInCurrentMetadata; - // auto-generate license if no licenses ever existed or if the current license is basic and - // needs extended or if the license signature needs to be updated. 
this will trigger a subsequent cluster changed event - if (currentClusterState.getNodes().isLocalNodeElectedMaster() - && (noLicense - || LicenseUtils.licenseNeedsExtended(currentLicense) - || LicenseUtils.signatureNeedsUpdate(currentLicense, currentClusterState.nodes()))) { - registerOrUpdateSelfGeneratedLicense(); - } + maybeRegisterOrUpdateLicense(previousClusterState, currentClusterState); } else if (logger.isDebugEnabled()) { logger.debug("skipped license notifications reason: [{}]", GatewayService.STATE_NOT_RECOVERED_BLOCK); } @@ -468,24 +441,38 @@ private void updateXPackLicenseState(License license) { if (license == LicensesMetadata.LICENSE_TOMBSTONE) { // implies license has been explicitly deleted xPacklicenseState.update(LicenseUtils.getXPackLicenseStatus(license, clock)); - return; - } - checkForExpiredLicense(license); - } - - private boolean checkForExpiredLicense(License license) { - if (license != null) { + } else if (license != null) { XPackLicenseStatus xPackLicenseStatus = LicenseUtils.getXPackLicenseStatus(license, clock); xPacklicenseState.update(xPackLicenseStatus); if (xPackLicenseStatus.active()) { logger.debug("license [{}] - valid", license.uid()); - return false; } else { logger.warn("license [{}] - expired", license.uid()); - return true; } } - return false; + } + + private void maybeRegisterOrUpdateLicense(ClusterState previousClusterState, ClusterState currentClusterState) { + final LicensesMetadata prevLicensesMetadata = previousClusterState.getMetadata().custom(LicensesMetadata.TYPE); + final LicensesMetadata currentLicensesMetadata = currentClusterState.getMetadata().custom(LicensesMetadata.TYPE); + License currentLicense = null; + boolean noLicenseInPrevMetadata = prevLicensesMetadata == null || prevLicensesMetadata.getLicense() == null; + if (noLicenseInPrevMetadata == false) { + currentLicense = prevLicensesMetadata.getLicense(); + } + boolean noLicenseInCurrentMetadata = (currentLicensesMetadata == null || 
currentLicensesMetadata.getLicense() == null); + if (noLicenseInCurrentMetadata == false) { + currentLicense = currentLicensesMetadata.getLicense(); + } + boolean noLicense = noLicenseInPrevMetadata && noLicenseInCurrentMetadata; + // auto-generate license if no licenses ever existed or if the current license is basic and + // needs extended or if the license signature needs to be updated. this will trigger a subsequent cluster changed event + if (currentClusterState.getNodes().isLocalNodeElectedMaster() + && (noLicense + || LicenseUtils.licenseNeedsExtended(currentLicense) + || LicenseUtils.signatureNeedsUpdate(currentLicense, currentClusterState.nodes()))) { + registerOrUpdateSelfGeneratedLicense(); + } } /** @@ -496,12 +483,14 @@ private boolean checkForExpiredLicense(License license) { */ private void onUpdate(final LicensesMetadata currentLicensesMetadata) { final License license = getLicenseFromLicensesMetadata(currentLicensesMetadata); + // first update the XPackLicenseState + updateXPackLicenseState(license); // license can be null if the trial license is yet to be auto-generated // in this case, it is a no-op if (license != null) { - final License previousLicense = currentLicenseHolder.get(); + final License previousLicense = currentLicenseHolder.getAndSet(license); if (license.equals(previousLicense) == false) { - currentLicenseHolder.set(license); + // then register periodic job to update the XPackLicenseState with the latest expiration message scheduler.add(new SchedulerEngine.Job(LICENSE_JOB, nextLicenseCheck(license))); for (ExpirationCallback expirationCallback : expirationCallbacks) { scheduler.add( @@ -517,24 +506,25 @@ private void onUpdate(final LicensesMetadata currentLicensesMetadata) { } logger.info("license [{}] mode [{}] - valid", license.uid(), license.operationMode().name().toLowerCase(Locale.ROOT)); } - updateXPackLicenseState(license); } } // pkg private for tests SchedulerEngine.Schedule nextLicenseCheck(License license) { + final long 
licenseIssueDate = license.issueDate(); + final long licenseExpiryDate = LicenseUtils.getExpiryDate(license); return (startTime, time) -> { - if (time < license.issueDate()) { + if (time < licenseIssueDate) { // when we encounter a license with a future issue date // which can happen with autogenerated license, // we want to schedule a notification on the license issue date // so the license is notified once it is valid // see https://github.com/elastic/x-plugins/issues/983 - return license.issueDate(); - } else if (time < LicenseUtils.getExpiryDate(license)) { + return licenseIssueDate; + } else if (time < licenseExpiryDate) { // Re-check the license every day during the warning period up to the license expiration. // This will cause the warning message to be updated that is emitted on soon-expiring license use. - long nextTime = LicenseUtils.getExpiryDate(license) - LicenseSettings.LICENSE_EXPIRATION_WARNING_PERIOD.getMillis(); + long nextTime = licenseExpiryDate - LicenseSettings.LICENSE_EXPIRATION_WARNING_PERIOD.getMillis(); while (nextTime <= time) { nextTime += TimeValue.timeValueDays(1).getMillis(); } @@ -550,6 +540,7 @@ public License getLicense(final Metadata metadata) { } // visible for tests + @Nullable License getLicenseFromLicensesMetadata(@Nullable final LicensesMetadata metadata) { if (metadata != null) { License license = metadata.getLicense(); @@ -558,6 +549,13 @@ License getLicenseFromLicensesMetadata(@Nullable final LicensesMetadata metadata } else if (license != null) { if (license.verified()) { return license; + } else { + // this is an "error" level because an unverified license should not be present in the cluster state in the first place + logger.error( + "{} with uid [{}] failed verification on the local node.", + License.isAutoGeneratedLicense(license.signature()) ? 
"Autogenerated license" : "License", + license.uid() + ); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java index 1dff1aca29b3a..5c761c4811642 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java @@ -6,9 +6,12 @@ */ package org.elasticsearch.license; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.core.Streams; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,11 +31,15 @@ import java.util.Base64; import java.util.Collections; +import static org.elasticsearch.core.Strings.format; + /** * Responsible for verifying signed licenses */ public class LicenseVerifier { + private static final Logger logger = LogManager.getLogger(LicenseVerifier.class); + /** * verifies the license content with the signature using the packaged * public key @@ -65,7 +72,17 @@ public static boolean verifyLicense(final License license, PublicKey publicKey) while ((ref = iterator.next()) != null) { rsa.update(ref.bytes, ref.offset, ref.length); } - return rsa.verify(signedContent); + boolean verifyResult = rsa.verify(signedContent); + if (verifyResult == false) { + logger.warn( + () -> format( + "License with uid [%s] failed signature verification with the public key with sha256 [%s].", + license.uid(), + PUBLIC_KEY_DIGEST_HEX_STRING + ) + ); + } + return verifyResult; } catch (IOException | NoSuchAlgorithmException | SignatureException | InvalidKeyException e) { throw new IllegalStateException(e); } finally { @@ -76,12 +93,15 @@ 
public static boolean verifyLicense(final License license, PublicKey publicKey) } private static final PublicKey PUBLIC_KEY; + private static final String PUBLIC_KEY_DIGEST_HEX_STRING; static { try (InputStream is = LicenseVerifier.class.getResourceAsStream("/public.key")) { ByteArrayOutputStream out = new ByteArrayOutputStream(); Streams.copy(is, out); - PUBLIC_KEY = CryptUtils.readPublicKey(out.toByteArray()); + byte[] publicKeyBytes = out.toByteArray(); + PUBLIC_KEY = CryptUtils.readPublicKey(publicKeyBytes); + PUBLIC_KEY_DIGEST_HEX_STRING = MessageDigests.toHexString(MessageDigests.sha256().digest(publicKeyBytes)); } catch (IOException e) { throw new AssertionError("key file is part of the source and must deserialize correctly", e); } From f64bb490550edb94715f1783203c46cc18653869 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 5 Dec 2023 12:23:34 +0200 Subject: [PATCH 151/181] Patterns support for allowed subjects by the JWT realm (#102426) This adds support for allowing JWT token sub claims with Lucene patterns and wildcards, by introducing a new JWT realm setting allowed_subject_patterns that can be used alongside the exist allowed_subjects realm setting. 
--- docs/changelog/102426.yaml | 5 + .../authentication/jwt-realm.asciidoc | 26 +- .../security/authc/jwt/JwtRealmSettings.java | 77 +++- .../xpack/security/authc/jwt/JwtRestIT.java | 10 +- .../security/authc/jwt/JwtAuthenticator.java | 40 +- .../authc/jwt/JwtStringClaimValidator.java | 97 +++-- .../JwtAuthenticatorAccessTokenTypeTests.java | 12 +- .../authc/jwt/JwtAuthenticatorTests.java | 241 +++++++++-- .../jwt/JwtStringClaimValidatorTests.java | 398 ++++++++++++++++-- 9 files changed, 797 insertions(+), 109 deletions(-) create mode 100644 docs/changelog/102426.yaml diff --git a/docs/changelog/102426.yaml b/docs/changelog/102426.yaml new file mode 100644 index 0000000000000..3aad50ed1eee0 --- /dev/null +++ b/docs/changelog/102426.yaml @@ -0,0 +1,5 @@ +pr: 102426 +summary: Patterns support for allowed subjects by the JWT realm +area: Authentication +type: feature +issues: [] diff --git a/docs/reference/security/authentication/jwt-realm.asciidoc b/docs/reference/security/authentication/jwt-realm.asciidoc index 68e20380449a5..4c9198956d21b 100644 --- a/docs/reference/security/authentication/jwt-realm.asciidoc +++ b/docs/reference/security/authentication/jwt-realm.asciidoc @@ -53,7 +53,8 @@ tokens can be issued by an OIDC Provider (OP), including ID Tokens. ID Tokens from an OIDC provider are well-defined JSON Web Tokens (JWT) and should be always compatible with a JWT realm of the `id_token` token type. The subject claim of an ID token represents the end-user. This means that ID tokens will generally have many allowed subjects. -Therefore, a JWT realm of `id_token` token type does _not_ mandate the `allowed_subjects` validation. +Therefore, a JWT realm of `id_token` token type does _not_ mandate the `allowed_subjects` +(or `allowed_subject_patterns`) validation. NOTE: Because JWTs are obtained external to {es}, you can define a custom workflow instead of using the OIDC workflow. 
However, the JWT format must still be JSON @@ -74,8 +75,8 @@ A typical usage of this flow is for an application to get a credential for itsel This is the use case that the `access_token` token type is designed for. It is likely that this application also obtains ID Tokens for its end-users. To prevent end-user ID Tokens being used to authenticate with the JWT realm configured -for the application, we mandate `allowed_subjects` validation when a JWT realm -has token type `access_token`. +for the application, we mandate `allowed_subjects` or `allowed_subject_patterns` +validation when a JWT realm has token type `access_token`. NOTE: Not every access token is formatted as a JSON Web Token (JWT). For it to be compatible with the JWT realm, it must at least use the JWT format and satisfies @@ -162,7 +163,8 @@ xpack.security.authc.realms.jwt.jwt2: token_type: access_token client_authentication.type: shared_secret allowed_issuer: "https://issuer.example.com/jwt/" - allowed_subjects: [ "123456-compute@developer.example.com" ] + allowed_subjects: [ "123456-compute@admin.example.com" ] + allowed_subject_patterns: [ "wild*@developer?.example.com", "/[a-z]+<1-10>\\@dev\\.example\\.com/"] allowed_audiences: [ "elasticsearch" ] required_claims: token_use: access @@ -181,7 +183,21 @@ Instructs the realm to treat and validate incoming JWTs as access tokens (`acces Specifies a list of JWT subjects that the realm will allow. These values are typically URLs, UUIDs, or other case-sensitive string values. -NOTE: This setting is mandatory for when `token_type` is `access_token`. +`allowed_subject_patterns`:: +Analogous to `allowed_subjects` but it accepts a list of <> +and wildcards for the allowed JWT subjects. 
Wildcards use the `*` and `?` special +characters (which are escaped by `\`) to mean "any string" and "any single character" +respectively, for example "a?\\**", matches "a1*" and "ab*whatever", but not "a", "abc", or "abc*" +(in Java strings `\` must itself be escaped by another `\`). +<> must be enclosed between `/`, +for example "/https?://[^/]+/?/" matches any http or https URL with no path component +(matches "https://elastic.co/" but not "https://elastic.co/guide"). + +NOTE: At least one of the `allowed_subjects` or `allowed_subject_patterns` settings must be specified +(and be non-empty) when `token_type` is `access_token`. + +NOTE: When both `allowed_subjects` and `allowed_subject_patterns` settings are specified +an incoming JWT's `sub` claim is accepted if it matches any of the two lists. `required_claims`:: Specifies a list of key/value pairs for additional verifications to be performed diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java index 1903dd5146f69..e75ff1ac321bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.RealmSettings; @@ -162,6 +163,7 @@ private static Set> getNonSecureSettings() { set.addAll( List.of( ALLOWED_SUBJECTS, + ALLOWED_SUBJECT_PATTERNS, FALLBACK_SUB_CLAIM, FALLBACK_AUD_CLAIM, REQUIRED_CLAIMS, @@ -255,11 +257,82 @@ private static 
Set> getSecureSettings() { ); // JWT end-user settings - public static final Setting.AffixSetting> ALLOWED_SUBJECTS = Setting.affixKeySetting( RealmSettings.realmSettingPrefix(TYPE), "allowed_subjects", - key -> Setting.stringListSetting(key, values -> verifyNonNullNotEmpty(key, values, null), Setting.Property.NodeScope) + key -> Setting.stringListSetting(key, new Setting.Validator<>() { + + @Override + public void validate(List allowedSubjects) { + // validate values themselves are not null or empty + allowedSubjects.forEach(allowedSubject -> verifyNonNullNotEmpty(key, allowedSubject, null)); + } + + @Override + @SuppressWarnings("unchecked") + public void validate(List allowedSubjects, Map, Object> settings) { + // validate both allowed_subjects and allowed_subject_patterns are not simultaneously empty (which is the default value) + final String namespace = ALLOWED_SUBJECTS.getNamespace(ALLOWED_SUBJECTS.getConcreteSetting(key)); + final List allowedSubjectPatterns = (List) settings.get( + ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace) + ); + if (allowedSubjects.isEmpty() && allowedSubjectPatterns.isEmpty()) { + throw new SettingsException( + "One of either [" + + ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace).getKey() + + "] or [" + + ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace).getKey() + + "] must be specified and not be empty." 
+ ); + } + } + + @Override + public Iterator> settings() { + final String namespace = ALLOWED_SUBJECTS.getNamespace(ALLOWED_SUBJECTS.getConcreteSetting(key)); + final List> settings = List.of(ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace)); + return settings.iterator(); + } + }, Setting.Property.NodeScope) + ); + + public static final Setting.AffixSetting> ALLOWED_SUBJECT_PATTERNS = Setting.affixKeySetting( + RealmSettings.realmSettingPrefix(TYPE), + "allowed_subject_patterns", + key -> Setting.stringListSetting(key, new Setting.Validator<>() { + + @Override + public void validate(List allowedSubjectPatterns) { + // validate values themselves are not null or empty + allowedSubjectPatterns.forEach(allowedSubjectPattern -> verifyNonNullNotEmpty(key, allowedSubjectPattern, null)); + } + + @Override + @SuppressWarnings("unchecked") + public void validate(List allowedSubjectPatterns, Map, Object> settings) { + // validate both allowed_subjects and allowed_subject_patterns are not simultaneously empty (which is the default value) + final String namespace = ALLOWED_SUBJECT_PATTERNS.getNamespace(ALLOWED_SUBJECT_PATTERNS.getConcreteSetting(key)); + final List allowedSubjects = (List) settings.get( + ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace) + ); + if (allowedSubjects.isEmpty() && allowedSubjectPatterns.isEmpty()) { + throw new SettingsException( + "One of either [" + + ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace).getKey() + + "] or [" + + ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace).getKey() + + "] must be specified and not be empty." 
+ ); + } + } + + @Override + public Iterator> settings() { + final String namespace = ALLOWED_SUBJECT_PATTERNS.getNamespace(ALLOWED_SUBJECT_PATTERNS.getConcreteSetting(key)); + final List> settings = List.of(ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace)); + return settings.iterator(); + } + }, Setting.Property.NodeScope) ); // Registered claim names from the JWT spec https://www.rfc-editor.org/rfc/rfc7519#section-4.1. diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java index db59bea999852..8f134a9d37502 100644 --- a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java @@ -149,7 +149,15 @@ private static Map realmSettings(LocalClusterSpec.LocalNodeSpec settings.put("xpack.security.authc.realms.jwt.jwt2.fallback_claims.sub", "email"); settings.put("xpack.security.authc.realms.jwt.jwt2.fallback_claims.aud", "scope"); settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_issuer", "my-issuer"); - settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subjects", SERVICE_SUBJECT.get()); + if (randomBoolean()) { + if (randomBoolean()) { + settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subjects", SERVICE_SUBJECT.get()); + } else { + settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subject_patterns", SERVICE_SUBJECT.get()); + } + } else { + settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subject_patterns", "service_*@app?.example.com"); + } settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_audiences", "es01,es02,es03"); settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_signature_algorithms", "HS256,HS384"); // Both email or sub 
works because of fallback diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java index 9c1deff9ed891..b06aba1c9d87a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java @@ -15,6 +15,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.RealmConfig; @@ -127,18 +128,19 @@ private static List configureFieldValidatorsForIdToken(RealmC final Clock clock = Clock.systemUTC(); final JwtStringClaimValidator subjectClaimValidator; - if (realmConfig.hasSetting(JwtRealmSettings.ALLOWED_SUBJECTS)) { - subjectClaimValidator = new JwtStringClaimValidator("sub", realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECTS), true); + if (realmConfig.hasSetting(JwtRealmSettings.ALLOWED_SUBJECTS) + || realmConfig.hasSetting(JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS)) { + subjectClaimValidator = getSubjectClaimValidator(realmConfig, null); } else { - // Allow any value for the sub claim as long as there is a non-null value + // Allows any non-null value for the sub claim subjectClaimValidator = JwtStringClaimValidator.ALLOW_ALL_SUBJECTS; } return List.of( JwtTypeValidator.INSTANCE, - new JwtStringClaimValidator("iss", List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), true), + new JwtStringClaimValidator("iss", true, List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), List.of()), subjectClaimValidator, - new JwtStringClaimValidator("aud", 
realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), false), + new JwtStringClaimValidator("aud", false, realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), List.of()), new JwtAlgorithmValidator(realmConfig.getSetting(JwtRealmSettings.ALLOWED_SIGNATURE_ALGORITHMS)), new JwtDateClaimValidator(clock, "iat", allowedClockSkew, JwtDateClaimValidator.Relationship.BEFORE_NOW, false), new JwtDateClaimValidator(clock, "exp", allowedClockSkew, JwtDateClaimValidator.Relationship.AFTER_NOW, false), @@ -157,9 +159,15 @@ private static List configureFieldValidatorsForAccessToken( return List.of( JwtTypeValidator.INSTANCE, - new JwtStringClaimValidator("iss", List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), true), - new JwtStringClaimValidator("sub", fallbackClaimLookup, realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECTS), true), - new JwtStringClaimValidator("aud", fallbackClaimLookup, realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), false), + new JwtStringClaimValidator("iss", true, List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), List.of()), + getSubjectClaimValidator(realmConfig, fallbackClaimLookup), + new JwtStringClaimValidator( + "aud", + false, + fallbackClaimLookup, + realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), + List.of() + ), new JwtAlgorithmValidator(realmConfig.getSetting(JwtRealmSettings.ALLOWED_SIGNATURE_ALGORITHMS)), new JwtDateClaimValidator(clock, "iat", allowedClockSkew, JwtDateClaimValidator.Relationship.BEFORE_NOW, false), new JwtDateClaimValidator(clock, "exp", allowedClockSkew, JwtDateClaimValidator.Relationship.AFTER_NOW, false) @@ -170,7 +178,21 @@ private List getRequireClaimsValidators() { final Settings requiredClaims = realmConfig.getSetting(JwtRealmSettings.REQUIRED_CLAIMS); return requiredClaims.names().stream().map(name -> { final List allowedValues = requiredClaims.getAsList(name); - return new JwtStringClaimValidator(name, allowedValues, false); + return new 
JwtStringClaimValidator(name, false, allowedValues, List.of()); }).toList(); } + + private static JwtStringClaimValidator getSubjectClaimValidator( + RealmConfig realmConfig, + @Nullable Map fallbackClaimLookup + ) { + // validateAllowedSubjectsSettings(realmConfig); + return new JwtStringClaimValidator( + "sub", + true, + fallbackClaimLookup, + realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECTS), + realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + ); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java index 30ea0979a624f..76a1b243f5277 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java @@ -11,49 +11,73 @@ import com.nimbusds.jwt.JWTClaimsSet; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.core.security.support.Automatons; +import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.function.Predicate; /** - * Validates a string claim against a list of allowed values. The validation is successful - * if the claim's value matches any of the allowed values. - * The claim's value can be either a single string or an array of strings. When it is an array - * of string, the validation passes when any member of the string array matches any of the allowed - * values. - * Whether a claim's value can be an array of strings is customised with the {@link #singleValuedClaim} - * field, which enforces the claim's value to be a single string if it is configured to {@code true}. 
- * - * NOTE the allowed values can be null which means skipping the actual value check, i.e. the validator - * succeeds as long as there is a (non-null) value. + * Validates a specific string claim form a {@link JWTClaimsSet} against both a list of explicit values and a list of Lucene patterns. + * The validation is successful if the claim's value matches any of the allowed values or patterns from the lists. + * The {@link JWTClaimsSet} claim value can either be a single string or an array of strings. + * The {@link JwtStringClaimValidator} can be configured to only accept a single string claim value + * (and reject string array claims) when the {@link #singleValuedClaim} field is set to {@code true}. + * When it is an array of string, the validation is successful when ANY array element matches ANY of the allowed values or patterns + * (and {@link #singleValuedClaim} field is {@code false}). */ public class JwtStringClaimValidator implements JwtFieldValidator { - public static JwtStringClaimValidator ALLOW_ALL_SUBJECTS = new JwtStringClaimValidator("sub", null, true); + // Allows any non-null value for the sub claim + public static final JwtStringClaimValidator ALLOW_ALL_SUBJECTS = new JwtStringClaimValidator("sub", true, List.of(), List.of("*")); private final String claimName; - @Nullable - private final Map fallbackClaimNames; - @Nullable - private final List allowedClaimValues; // Whether the claim should be a single string private final boolean singleValuedClaim; + @Nullable + private final Map fallbackClaimNames; + private final Predicate allowedClaimValuesPredicate; - public JwtStringClaimValidator(String claimName, List allowedClaimValues, boolean singleValuedClaim) { - this(claimName, null, allowedClaimValues, singleValuedClaim); + public JwtStringClaimValidator( + String claimName, + boolean singleValuedClaim, + Collection allowedClaimValues, + Collection allowedClaimValuePatterns + ) { + this(claimName, singleValuedClaim, null, allowedClaimValues, 
allowedClaimValuePatterns); } public JwtStringClaimValidator( String claimName, + boolean singleValuedClaim, Map fallbackClaimNames, - List allowedClaimValues, - boolean singleValuedClaim + Collection allowedClaimValues, + Collection allowedClaimValuePatterns ) { + assert allowedClaimValues != null : "allowed claim values should be empty rather than null"; + assert allowedClaimValuePatterns != null : "allowed claim value patterns should be empty rather than null"; this.claimName = claimName; - this.fallbackClaimNames = fallbackClaimNames; - this.allowedClaimValues = allowedClaimValues; this.singleValuedClaim = singleValuedClaim; + this.fallbackClaimNames = fallbackClaimNames; + this.allowedClaimValuesPredicate = new Predicate<>() { + private final Set allowedClaimsSet = new HashSet<>(allowedClaimValues); + private final Predicate allowedClaimPatternsPredicate = predicateFromPatterns(claimName, allowedClaimValuePatterns); + + @Override + public boolean test(String s) { + return allowedClaimsSet.contains(s) || allowedClaimPatternsPredicate.test(s); + } + + @Override + public String toString() { + return "[" + Strings.collectionToCommaDelimitedString(allowedClaimsSet) + "] || [" + allowedClaimPatternsPredicate + "]"; + } + }; } @Override @@ -63,18 +87,19 @@ public void validate(JWSHeader jwsHeader, JWTClaimsSet jwtClaimsSet) { if (claimValues == null) { throw new IllegalArgumentException("missing required string claim [" + fallbackableClaim + "]"); } - - if (allowedClaimValues != null && false == claimValues.stream().anyMatch(allowedClaimValues::contains)) { - throw new IllegalArgumentException( - "string claim [" - + fallbackableClaim - + "] has value [" - + Strings.collectionToCommaDelimitedString(claimValues) - + "] which does not match allowed claim values [" - + Strings.collectionToCommaDelimitedString(allowedClaimValues) - + "]" - ); + for (String claimValue : claimValues) { + if (allowedClaimValuesPredicate.test(claimValue)) { + return; + } } + throw new 
IllegalArgumentException( + "string claim [" + + fallbackableClaim + + "] has value [" + + Strings.collectionToCommaDelimitedString(claimValues) + + "] which does not match allowed claim values " + + allowedClaimValuesPredicate + ); } private List getStringClaimValues(FallbackableClaim fallbackableClaim) { @@ -85,4 +110,12 @@ private List getStringClaimValues(FallbackableClaim fallbackableClaim) { return fallbackableClaim.getStringListClaimValue(); } } + + private static Predicate predicateFromPatterns(String claimName, Collection patterns) { + try { + return Automatons.predicate(patterns); + } catch (Exception e) { + throw new SettingsException("Invalid patterns for allowed claim values for [" + claimName + "].", e); + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java index 40bf021a48a9c..b1bb03b95dd8c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java @@ -30,11 +30,17 @@ public void testSubjectIsRequired() throws ParseException { public void testAccessTokenTypeMandatesAllowedSubjects() { allowedSubject = null; + allowedSubjectPattern = null; final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> buildJwtAuthenticator()); - assertThat( - e.getMessage(), - containsString("Invalid empty list for [" + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + "]") + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, 
JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." + ) ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java index dd1a984a0dcb5..7a44ebae95738 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.jwt.JwtAuthenticationToken; import org.elasticsearch.xpack.core.security.authc.jwt.JwtRealmSettings; +import org.elasticsearch.xpack.core.ssl.SSLService; import org.junit.Before; import java.text.ParseException; @@ -32,10 +34,8 @@ import java.time.temporal.ChronoUnit; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; -import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.mockito.ArgumentMatchers.any; @@ -51,6 +51,7 @@ public abstract class JwtAuthenticatorTests extends ESTestCase { protected String allowedIssuer; @Nullable protected String allowedSubject; + protected String allowedSubjectPattern; protected String allowedAudience; 
protected String fallbackSub; protected String fallbackAud; @@ -64,11 +65,19 @@ public void beforeTest() { allowedIssuer = randomAlphaOfLength(6); allowedAlgorithm = randomFrom(JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS_HMAC); if (getTokenType() == JwtRealmSettings.TokenType.ID_TOKEN) { + // allowedSubject and allowedSubjectPattern can both be null for allowedSubject = randomBoolean() ? randomAlphaOfLength(8) : null; + allowedSubjectPattern = randomBoolean() ? randomAlphaOfLength(8) : null; fallbackSub = null; fallbackAud = null; } else { - allowedSubject = randomAlphaOfLength(8); + if (randomBoolean()) { + allowedSubject = randomAlphaOfLength(8); + allowedSubjectPattern = randomBoolean() ? randomAlphaOfLength(8) : null; + } else { + allowedSubject = randomBoolean() ? randomAlphaOfLength(8) : null; + allowedSubjectPattern = randomAlphaOfLength(8); + } fallbackSub = randomBoolean() ? "_" + randomAlphaOfLength(5) : null; fallbackAud = randomBoolean() ? "_" + randomAlphaOfLength(8) : null; } @@ -84,7 +93,7 @@ public void testRequiredClaims() throws ParseException { "iss", allowedIssuer, "sub", - allowedSubject == null ? randomAlphaOfLengthBetween(10, 18) : allowedSubject, + getValidSubClaimValue(), "aud", allowedAudience, requiredClaim.v1(), @@ -122,7 +131,7 @@ public void testMismatchedRequiredClaims() throws ParseException { "iss", allowedIssuer, "sub", - allowedSubject == null ? 
randomAlphaOfLengthBetween(10, 18) : allowedSubject, + getValidSubClaimValue(), "aud", allowedAudience, requiredClaim.v1(), @@ -155,10 +164,9 @@ public void testMismatchedRequiredClaims() throws ParseException { + "] has value [" + mismatchRequiredClaimValue + "] which does not match allowed claim values [" - + requiredClaim.v2().stream().collect(Collectors.joining(",")) - + "]" ) ); + requiredClaim.v2().stream().forEach(requiredClaim -> { assertThat(e.getMessage(), containsString(requiredClaim)); }); } public void testMissingRequiredClaims() throws ParseException { @@ -168,7 +176,7 @@ public void testMissingRequiredClaims() throws ParseException { "iss", allowedIssuer, "sub", - allowedSubject == null ? randomAlphaOfLengthBetween(10, 18) : allowedSubject, + getValidSubClaimValue(), "aud", allowedAudience, "iat", @@ -233,14 +241,188 @@ protected void doTestInvalidIssuerIsCheckedBeforeAlgorithm(JwtAuthenticator jwtA final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, future::actionGet); assertThat( - e, - throwableWithMessage( - "string claim [iss] has value [" + invalidIssuer + "] which does not match allowed claim values [" + allowedIssuer + "]" + e.getMessage(), + containsString( + "string claim [iss] has value [" + + invalidIssuer + + "] which does not match allowed claim " + + "values [" + + allowedIssuer + + "]" + ) + ); + } + + public void testInvalidAllowedSubjectClaimPattern() { + allowedSubjectPattern = "/invalid pattern"; + final SettingsException e = expectThrows(SettingsException.class, () -> buildJwtAuthenticator()); + assertThat(e.getMessage(), containsString("Invalid patterns for allowed claim values for [sub].")); + } + + public void testEmptyAllowedSubjectIsInvalid() { + allowedSubject = null; + allowedSubjectPattern = null; + RealmConfig someJWTRealmConfig = buildJWTRealmConfig(); + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + boolean emptySubjects = 
randomBoolean(); + if (emptySubjects) { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), List.of("")); + } else { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), List.of("")); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} ) ); + if (emptySubjects) { + assertThat( + e.getMessage(), + containsString( + "Invalid empty value for [" + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + "]." + ) + ); + } else { + assertThat( + e.getMessage(), + containsString( + "Invalid empty value for [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "]." + ) + ); + } + } + + public void testNoAllowedSubjectInvalidSettings() { + allowedSubject = null; + allowedSubjectPattern = null; + RealmConfig someJWTRealmConfig = buildJWTRealmConfig(); + { + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + if (randomBoolean()) { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), List.of()); + } else { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS)); + } + if (randomBoolean()) { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), List.of()); + } else { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS)); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + 
someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} + ) + ); + assertThat( + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." + ) + ); + } + { + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + if (randomBoolean()) { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS)); + } else { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), List.of()); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} + ) + ); + assertThat( + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." 
+ ) + ); + } + { + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + if (randomBoolean()) { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS)); + } else { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), List.of()); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} + ) + ); + assertThat( + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." + ) + ); + } } protected JwtAuthenticator buildJwtAuthenticator() { + final RealmConfig realmConfig = buildJWTRealmConfig(); + final JwtAuthenticator jwtAuthenticator = spy(new JwtAuthenticator(realmConfig, null, () -> {})); + // Short circuit signature validation to be always successful since this test class does not test it + doAnswer(invocation -> { + final ActionListener listener = invocation.getArgument(2); + listener.onResponse(null); + return null; + }).when(jwtAuthenticator).validateSignature(any(), any(), anyActionListener()); + return jwtAuthenticator; + } + + protected RealmConfig buildJWTRealmConfig() { final RealmConfig.RealmIdentifier realmIdentifier = new RealmConfig.RealmIdentifier(JwtRealmSettings.TYPE, realmName); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HMAC_KEY), randomAlphaOfLength(40)); @@ -251,11 +433,12 @@ protected JwtAuthenticator buildJwtAuthenticator() { 
.put(RealmSettings.getFullSettingKey(realmIdentifier, RealmSettings.ORDER_SETTING), randomIntBetween(0, 99)) .put("path.home", randomAlphaOfLength(10)) .setSecureSettings(secureSettings); - if (allowedSubject != null) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), allowedSubject); } - + if (allowedSubjectPattern != null) { + builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), allowedSubjectPattern); + } if (getTokenType() == JwtRealmSettings.TokenType.ID_TOKEN) { if (randomBoolean()) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE), "id_token"); @@ -263,14 +446,12 @@ protected JwtAuthenticator buildJwtAuthenticator() { } else { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE), "access_token"); } - if (fallbackSub != null) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_SUB_CLAIM), fallbackSub); } if (fallbackAud != null) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_AUD_CLAIM), fallbackAud); } - if (requiredClaim != null) { final String requiredClaimsKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.REQUIRED_CLAIMS) + requiredClaim .v1(); @@ -280,24 +461,20 @@ protected JwtAuthenticator buildJwtAuthenticator() { builder.putList(requiredClaimsKey, requiredClaim.v2()); } } - final Settings settings = builder.build(); + return new RealmConfig(realmIdentifier, settings, TestEnvironment.newEnvironment(settings), new ThreadContext(settings)); + } - final RealmConfig realmConfig = new RealmConfig( - realmIdentifier, - settings, - TestEnvironment.newEnvironment(settings), - new ThreadContext(settings) - ); - - final JwtAuthenticator jwtAuthenticator = spy(new JwtAuthenticator(realmConfig, null, () -> {})); - // Short circuit signature validation to be always successful since this test class does not test it - 
doAnswer(invocation -> { - final ActionListener listener = invocation.getArgument(2); - listener.onResponse(null); - return null; - }).when(jwtAuthenticator).validateSignature(any(), any(), anyActionListener()); - - return jwtAuthenticator; + private String getValidSubClaimValue() { + if (allowedSubject == null && allowedSubjectPattern == null) { + // any subject is valid + return randomAlphaOfLengthBetween(10, 18); + } else if (allowedSubject == null) { + return allowedSubjectPattern; + } else if (allowedSubjectPattern == null) { + return allowedSubject; + } else { + return randomFrom(allowedSubject, allowedSubjectPattern); + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java index 41e666db39a5f..c59c1e19e0ee1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java @@ -10,10 +10,12 @@ import com.nimbusds.jose.JWSHeader; import com.nimbusds.jwt.JWTClaimsSet; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; import java.text.ParseException; import java.util.List; +import java.util.Locale; import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; @@ -28,11 +30,11 @@ public void testClaimIsNotString() throws ParseException { final JwtStringClaimValidator validator; final JWTClaimsSet jwtClaimsSet; if (randomBoolean()) { - validator = new JwtStringClaimValidator(claimName, List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), List.of()); // fallback claim is ignored jwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, List.of(42), fallbackClaimName, 
randomAlphaOfLength(8))); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), Map.of(claimName, fallbackClaimName), List.of(), List.of()); jwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, List.of(42))); } @@ -46,16 +48,16 @@ public void testClaimIsNotString() throws ParseException { public void testClaimIsNotSingleValued() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); final JwtStringClaimValidator validator; final JWTClaimsSet jwtClaimsSet; if (randomBoolean()) { - validator = new JwtStringClaimValidator(claimName, List.of(), true); + validator = new JwtStringClaimValidator(claimName, true, List.of(), List.of()); // fallback claim is ignored jwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, List.of("foo", "bar"), fallbackClaimName, randomAlphaOfLength(8))); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(), true); + validator = new JwtStringClaimValidator(claimName, true, Map.of(claimName, fallbackClaimName), List.of(), List.of()); jwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, List.of("foo", "bar"))); } @@ -69,14 +71,14 @@ public void testClaimIsNotSingleValued() throws ParseException { public void testClaimDoesNotExist() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); final JwtStringClaimValidator validator; final JWTClaimsSet jwtClaimsSet; if (randomBoolean()) { - validator = new JwtStringClaimValidator(claimName, List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), 
List.of()); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), Map.of(claimName, fallbackClaimName), List.of(), List.of()); } jwtClaimsSet = JWTClaimsSet.parse(Map.of()); @@ -89,7 +91,7 @@ public void testClaimDoesNotExist() throws ParseException { public void testMatchingClaimValues() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); final String claimValue = randomAlphaOfLength(10); final boolean singleValuedClaim = randomBoolean(); final List allowedClaimValues = List.of(claimValue, randomAlphaOfLengthBetween(11, 20)); @@ -99,11 +101,17 @@ public void testMatchingClaimValues() throws ParseException { final JWTClaimsSet validJwtClaimsSet; final boolean noFallback = randomBoolean(); if (noFallback) { - validator = new JwtStringClaimValidator(claimName, allowedClaimValues, singleValuedClaim); + validator = new JwtStringClaimValidator(claimName, singleValuedClaim, allowedClaimValues, List.of()); // fallback claim is ignored validJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, incomingClaimValue, fallbackClaimName, List.of(42))); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), allowedClaimValues, singleValuedClaim); + validator = new JwtStringClaimValidator( + claimName, + singleValuedClaim, + Map.of(claimName, fallbackClaimName), + allowedClaimValues, + List.of() + ); validJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, incomingClaimValue)); } @@ -113,35 +121,128 @@ public void testMatchingClaimValues() throws ParseException { throw new AssertionError("validation should have passed without exception", e); } - final JWTClaimsSet invalidJwtClaimsSet; - if (noFallback) { - // fallback is ignored (even when 
it has a valid value) since the main claim exists - invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, "not-" + claimValue, fallbackClaimName, claimValue)); + String invalidClaimValue; + if (randomBoolean()) { + invalidClaimValue = "not-" + claimValue; } else { - invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "not-" + claimValue)); + // letter case mismatch: invert case at pos i + int i = randomIntBetween(0, claimValue.length() - 1); + invalidClaimValue = claimValue.substring(0, i); + if (Character.isUpperCase(claimValue.charAt(i))) { + invalidClaimValue += claimValue.substring(i, i).toLowerCase(Locale.ROOT); + } else if (Character.isLowerCase(claimValue.charAt(i))) { + invalidClaimValue += claimValue.substring(i, i).toUpperCase(Locale.ROOT); + } else { + throw new AssertionError("Unrecognized case"); + } + invalidClaimValue += claimValue.substring(i + 1); } + { + final JWTClaimsSet invalidJwtClaimsSet; + if (noFallback) { + // fallback is ignored (even when it has a valid value) since the main claim exists + invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, invalidClaimValue, fallbackClaimName, claimValue)); + } else { + invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, invalidClaimValue)); + } - final IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> validator.validate(getJwsHeader(), invalidJwtClaimsSet) - ); - assertThat(e.getMessage(), containsString("does not match allowed claim values")); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidJwtClaimsSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } } - public void testDoesNotSupportWildcardOrRegex() throws ParseException { + public void testWildcardAndRegexMatchingClaimValues() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String 
fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); + final List allowedClaimPatterns = List.of("a?\\**", "/https?://[^/]+/?/"); + + final boolean noFallback = randomBoolean(); + final JwtStringClaimValidator validator; + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, false, List.of(), allowedClaimPatterns); + } else { + validator = new JwtStringClaimValidator( + claimName, + false, + Map.of(claimName, fallbackClaimName), + List.of(), + allowedClaimPatterns + ); + } + for (String incomingClaimValue : List.of("a1*", "ab*whatever", "https://elastic.co/")) { + final JWTClaimsSet validJwtClaimsSet; + if (noFallback) { + // fallback claim is ignored + validJwtClaimsSet = JWTClaimsSet.parse( + Map.of( + claimName, + randomBoolean() ? incomingClaimValue : List.of(incomingClaimValue, "other_stuff"), + fallbackClaimName, + List.of(42) + ) + ); + } else { + validJwtClaimsSet = JWTClaimsSet.parse( + Map.of(fallbackClaimName, randomBoolean() ? incomingClaimValue : List.of(incomingClaimValue, "other_stuff")) + ); + } + try { + validator.validate(getJwsHeader(), validJwtClaimsSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + } + for (String invalidIncomingClaimValue : List.of("a", "abc", "abc*", "https://elastic.co/guide")) { + final JWTClaimsSet invalidJwtClaimsSet; + if (noFallback) { + // fallback claim is ignored + invalidJwtClaimsSet = JWTClaimsSet.parse( + Map.of( + claimName, + randomBoolean() ? invalidIncomingClaimValue : List.of(invalidIncomingClaimValue, "other_stuff"), + fallbackClaimName, + List.of(42) + ) + ); + } else { + invalidJwtClaimsSet = JWTClaimsSet.parse( + Map.of( + fallbackClaimName, + randomBoolean() ? 
invalidIncomingClaimValue : List.of(invalidIncomingClaimValue, "other_stuff") + ) + ); + } + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidJwtClaimsSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + + public void testValueAllowSettingDoesNotSupportWildcardOrRegex() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); final String claimValue = randomFrom("*", "/.*/"); final JwtStringClaimValidator validator; final JWTClaimsSet invalidJwtClaimsSet; final boolean noFallback = randomBoolean(); if (noFallback) { - validator = new JwtStringClaimValidator(claimName, List.of(claimValue), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(claimValue), List.of()); // fallback is ignored (even when it has a valid value) since the main claim exists invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, randomAlphaOfLengthBetween(1, 10), fallbackClaimName, claimValue)); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(claimValue), randomBoolean()); + validator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + Map.of(claimName, fallbackClaimName), + List.of(claimValue), + List.of() + ); invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, randomAlphaOfLengthBetween(1, 10))); } @@ -167,6 +268,253 @@ public void testDoesNotSupportWildcardOrRegex() throws ParseException { } } + public void testSinglePatternSingleClaim() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); + final String claimPattern = randomFrom("a*", "/a.*/"); + final JwtStringClaimValidator validator; + final JWTClaimsSet singleValueClaimSet; + final boolean 
noFallback = randomBoolean(); + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), List.of(claimPattern)); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of(claimName, "a_claim", fallbackClaimName, randomFrom(List.of("invalid", "invalid2"), "invalid"), "something", "else") + ); + } else { + validator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + Map.of(claimName, fallbackClaimName), + List.of(), + List.of(claimPattern) + ); + singleValueClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "a_fallback_claim", "something", "else")); + } + try { + validator.validate(getJwsHeader(), singleValueClaimSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of(claimName, "invalid", fallbackClaimName, randomFrom(List.of("a_claim", "a_claim2"), "a_claim"), "something", "else") + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "invalid", "something", "else")); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + if (noFallback) { + assertThat(e.getMessage(), containsString("missing required string claim")); + } else { + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + } + + public void testPatternListSingleClaim() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); + final List claimPatterns = List.of("a*", "/b.*b/"); + final JwtStringClaimValidator validator; + final 
JWTClaimsSet singleValueClaimSet; + final boolean noFallback = randomBoolean(); + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), claimPatterns); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + "b_claim_b", + fallbackClaimName, + randomFrom(List.of("invalid", "invalid2"), "invalid"), + "something", + "else" + ) + ); + } else { + validator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + Map.of(claimName, fallbackClaimName), + List.of(), + claimPatterns + ); + singleValueClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "b_fallback_claim_b", "something", "else")); + } + try { + validator.validate(getJwsHeader(), singleValueClaimSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + "invalid", + fallbackClaimName, + randomFrom(List.of("b_claim_b", "b_claim2_b"), "b_claim_b"), + "something", + "else" + ) + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "invalid", "something", "else")); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + if (noFallback) { + assertThat(e.getMessage(), containsString("missing required string claim")); + } else { + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + } + + public void testPatternListClaimList() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); + final List claimPatterns = 
List.of("a*", "/b.*b/"); + final JwtStringClaimValidator validator; + final JWTClaimsSet singleValueClaimSet; + final boolean noFallback = randomBoolean(); + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, false, List.of(), claimPatterns); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + List.of("invalid", "b_claim_b"), + fallbackClaimName, + randomFrom(List.of("invalid", "invalid2"), "invalid"), + "something", + "else" + ) + ); + } else { + validator = new JwtStringClaimValidator(claimName, false, Map.of(claimName, fallbackClaimName), List.of(), claimPatterns); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of(fallbackClaimName, List.of("invalid", "b_fallback_claim_b"), "something", "else") + ); + } + try { + validator.validate(getJwsHeader(), singleValueClaimSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + List.of("invalid", "invalid2"), + fallbackClaimName, + randomFrom(List.of("b_claim_b", "a_claim"), "b_claim_b"), + "something", + "else" + ) + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of(fallbackClaimName, List.of("invalid", "invalid2"), "something", "else") + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + if (noFallback) { + assertThat(e.getMessage(), containsString("missing required string claim")); + } else { + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + } + + public void testBothPatternAndSimpleValue() { + final String claimName = 
randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomFrom(randomAlphaOfLength(8), null); + final List claimPatterns = List.of("a*", "/.*Z.*/", "*b"); + final List claimValues = List.of("c", "dd", "eZe"); + final JwtStringClaimValidator singleValueValidator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + fallbackClaimName == null ? null : Map.of(claimName, fallbackClaimName), + claimValues, + claimPatterns + ); + for (String claimValue : List.of("a_claim", "anotZer_claim", "Z", "claim_b", "c", "dd", "eZe")) { + if (fallbackClaimName != null) { + try { + singleValueValidator.validate( + getJwsHeader(), + JWTClaimsSet.parse(Map.of(fallbackClaimName, claimValue, "something", "else")) + ); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + } else { + try { + singleValueValidator.validate(getJwsHeader(), JWTClaimsSet.parse(Map.of(claimName, claimValue, "something", "else"))); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + } + } + for (String invalidClaimValue : List.of("invalid", "cc", "ca", "dda", "ba")) { + IllegalArgumentException e; + if (fallbackClaimName != null) { + e = expectThrows( + IllegalArgumentException.class, + () -> singleValueValidator.validate( + getJwsHeader(), + JWTClaimsSet.parse(Map.of(fallbackClaimName, invalidClaimValue, "something", "else")) + ) + ); + } else { + e = expectThrows( + IllegalArgumentException.class, + () -> singleValueValidator.validate( + getJwsHeader(), + JWTClaimsSet.parse(Map.of(claimName, invalidClaimValue, "something", "else")) + ) + ); + } + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + + public void testInvalidPatternThrows() { + String claimName = randomAlphaOfLength(4); + SettingsException e = expectThrows( + SettingsException.class, + () -> new JwtStringClaimValidator( + claimName, + randomBoolean(), + 
randomBoolean() ? null : Map.of(randomAlphaOfLength(4), randomAlphaOfLength(8)), + randomBoolean() ? List.of() : List.of("dummy"), + List.of("/invalid pattern") + ) + ); + assertThat(e.getMessage(), containsString("Invalid patterns for allowed claim values for [" + claimName + "].")); + } + public void testAllowAllSubjects() { try { JwtStringClaimValidator.ALLOW_ALL_SUBJECTS.validate( From ad9cfcf2e4f2d162417ed63ce2aae5d929cbf57b Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 5 Dec 2023 10:30:30 +0000 Subject: [PATCH 152/181] Remove version field from CachedBlob (#102706) The field isn't actually used, but we need to keep it around for BwC for now --- .../cache/blob/BlobStoreCacheService.java | 2 - .../cache/blob/CachedBlob.java | 45 ++++--------------- 2 files changed, 9 insertions(+), 38 deletions(-) diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java index 448e1e02d889e..a7ba0294d5c98 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.IndexFileNames; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetRequest; @@ -245,7 +244,6 @@ public final void putAsync( try { final CachedBlob cachedBlob = new CachedBlob( Instant.ofEpochMilli(timeInEpochMillis), - Version.CURRENT, repository, name, generatePath(snapshotId, indexId, shardId), diff --git 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java index 91d2900553444..aba553e563c3e 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java @@ -10,6 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,18 +24,17 @@ public class CachedBlob implements ToXContent { /** * Sentinel {@link CachedBlob} indicating that searching the cache index returned an error. */ - public static final CachedBlob CACHE_NOT_READY = new CachedBlob(null, null, null, "CACHE_NOT_READY", null, BytesArray.EMPTY, 0L, 0L); + public static final CachedBlob CACHE_NOT_READY = new CachedBlob(null, null, "CACHE_NOT_READY", null, BytesArray.EMPTY, 0L, 0L); /** * Sentinel {@link CachedBlob} indicating that the cache index definitely did not contain the requested data. 
*/ - public static final CachedBlob CACHE_MISS = new CachedBlob(null, null, null, "CACHE_MISS", null, BytesArray.EMPTY, 0L, 0L); + public static final CachedBlob CACHE_MISS = new CachedBlob(null, null, "CACHE_MISS", null, BytesArray.EMPTY, 0L, 0L); private static final String TYPE = "blob"; public static final String CREATION_TIME_FIELD = "creation_time"; private final Instant creationTime; - private final Version version; private final String repository; private final String name; private final String path; @@ -43,30 +43,12 @@ public class CachedBlob implements ToXContent { private final long from; private final long to; - public CachedBlob( - Instant creationTime, - Version version, - String repository, - String name, - String path, - BytesReference content, - long offset - ) { - this(creationTime, version, repository, name, path, content, offset, offset + (content == null ? 0 : content.length())); + public CachedBlob(Instant creationTime, String repository, String name, String path, BytesReference content, long offset) { + this(creationTime, repository, name, path, content, offset, offset + (content == null ? 
0 : content.length())); } - private CachedBlob( - Instant creationTime, - Version version, - String repository, - String name, - String path, - BytesReference content, - long from, - long to - ) { + private CachedBlob(Instant creationTime, String repository, String name, String path, BytesReference content, long from, long to) { this.creationTime = creationTime; - this.version = version; this.repository = repository; this.name = name; this.path = path; @@ -78,11 +60,13 @@ private CachedBlob( @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + @UpdateForV9 // we can remove the version field when we no longer need to keep compatibility with <8.12 + final int version = Version.CURRENT.id; builder.startObject(); { builder.field("type", TYPE); builder.field(CREATION_TIME_FIELD, creationTime.toEpochMilli()); - builder.field("version", version.id); + builder.field("version", version); builder.field("repository", repository); builder.startObject("blob"); { @@ -118,10 +102,6 @@ public BytesReference bytes() { return bytes; } - public Version version() { - return version; - } - public Instant creationTime() { return creationTime; } @@ -132,10 +112,6 @@ public static CachedBlob fromSource(final Map source) { if (creationTimeEpochMillis == null) { throw new IllegalStateException("cached blob document does not have the [creation_time] field"); } - final Version version = Version.fromId((Integer) source.get("version")); - if (version == null) { - throw new IllegalStateException("cached blob document does not have the [version] field"); - } final String repository = (String) source.get("repository"); if (repository == null) { throw new IllegalStateException("cached blob document does not have the [repository] field"); @@ -179,7 +155,6 @@ public static CachedBlob fromSource(final Map source) { // TODO add exhaustive verifications (from/to/content.length, version supported, id == recomputed id etc) return new CachedBlob( 
Instant.ofEpochMilli(creationTimeEpochMillis), - version, repository, name, path, @@ -194,8 +169,6 @@ public String toString() { return "CachedBlob [" + "creationTime=" + creationTime - + ", version=" - + version + ", repository='" + repository + '\'' From 8d0551ecb965a209ab6c9b6afe59d601d94c80ee Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Tue, 5 Dec 2023 11:50:50 +0100 Subject: [PATCH 153/181] ESQL: emit warnings from single-value functions processing multi-values (#102417) When encountering a multi-value, a single-value function (i.e. all non-`mv_xxx()`) returns a `null`. This behaviour is opaque to the user. This PR adds the functionality for these functions to emit a `Warning` header, so the user is informed about the cause for the `null`s. Within testing, there are some differences between the emulated CSV-based tests (`TestPhysical*`) and the REST CSV-tests and thus the exact messages in the warnings: * The REST ones can push operations to Lucene; when this happens, a query containing a negation, `not `, can be translated to a `must_not` query, that will include the `not` in the `Source`. But outside of Lucene, the execution would consider the predicate first, then the negation. So when the predicate contains a SV function, only this part's `Source` will show up in the warning. * When pushed to Lucene, a query is wrapped within the `SingleValueQuery`. This emits now warnings when encountering MVs (and returning no match). However, this only happens once the query that it wraps returns something itself. Comparatively, the `TestPhysical*` filters will issue a warning for every encountered MV (irrespective of sigle values within the MV matching or not). To differentiate between the slightly differing values of the warnings, one can now append the `#[Emulated:` prefix to a warning, followed by the value of the warning for the emulated checks, then a corresponding `]`. 
Example: `warning:Line 1:24: evaluation of [not(salary_change < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded.]` Closes #98743. --- docs/changelog/102417.yaml | 6 + .../esql/multivalued-fields.asciidoc | 8 + .../org/elasticsearch/TransportVersions.java | 1 + .../compute/gen/EvaluatorImplementer.java | 42 ++--- .../org/elasticsearch/compute/gen/Types.java | 5 - .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 2 +- .../src/main/resources/boolean.csv-spec | 6 +- .../src/main/resources/eval.csv-spec | 8 +- .../src/main/resources/floats.csv-spec | 32 +++- .../src/main/resources/ints.csv-spec | 36 +++-- .../src/main/resources/ip.csv-spec | 52 +++++-- .../src/main/resources/string.csv-spec | 54 +++++-- .../src/main/resources/unsigned_long.csv-spec | 34 +++- .../comparison/EqualsBoolsEvaluator.java | 33 +++- .../comparison/EqualsDoublesEvaluator.java | 33 +++- .../comparison/EqualsIntsEvaluator.java | 33 +++- .../comparison/EqualsKeywordsEvaluator.java | 33 +++- .../comparison/EqualsLongsEvaluator.java | 33 +++- .../GreaterThanDoublesEvaluator.java | 33 +++- .../comparison/GreaterThanIntsEvaluator.java | 33 +++- .../GreaterThanKeywordsEvaluator.java | 33 +++- .../comparison/GreaterThanLongsEvaluator.java | 33 +++- .../GreaterThanOrEqualDoublesEvaluator.java | 33 +++- .../GreaterThanOrEqualIntsEvaluator.java | 33 +++- .../GreaterThanOrEqualKeywordsEvaluator.java | 33 +++- .../GreaterThanOrEqualLongsEvaluator.java | 33 +++- .../comparison/LessThanDoublesEvaluator.java | 33 +++- .../comparison/LessThanIntsEvaluator.java | 33 +++- .../comparison/LessThanKeywordsEvaluator.java | 33 +++- .../comparison/LessThanLongsEvaluator.java | 33 +++- .../LessThanOrEqualDoublesEvaluator.java | 33 +++- .../LessThanOrEqualIntsEvaluator.java | 33 +++- .../LessThanOrEqualKeywordsEvaluator.java | 33 +++- .../LessThanOrEqualLongsEvaluator.java | 33 +++- 
.../comparison/NotEqualsBoolsEvaluator.java | 33 +++- .../comparison/NotEqualsDoublesEvaluator.java | 33 +++- .../comparison/NotEqualsIntsEvaluator.java | 33 +++- .../NotEqualsKeywordsEvaluator.java | 33 +++- .../comparison/NotEqualsLongsEvaluator.java | 33 +++- .../operator/logical/NotEvaluator.java | 25 ++- .../operator/regex/RegexMatchEvaluator.java | 27 +++- .../conditional/GreatestBooleanEvaluator.java | 24 ++- .../GreatestBytesRefEvaluator.java | 24 ++- .../conditional/GreatestDoubleEvaluator.java | 24 ++- .../conditional/GreatestIntEvaluator.java | 24 ++- .../conditional/GreatestLongEvaluator.java | 24 ++- .../conditional/LeastBooleanEvaluator.java | 24 ++- .../conditional/LeastBytesRefEvaluator.java | 24 ++- .../conditional/LeastDoubleEvaluator.java | 24 ++- .../scalar/conditional/LeastIntEvaluator.java | 25 ++- .../conditional/LeastLongEvaluator.java | 24 ++- .../date/DateExtractConstantEvaluator.java | 26 +++- .../scalar/date/DateExtractEvaluator.java | 18 ++- .../date/DateFormatConstantEvaluator.java | 27 +++- .../scalar/date/DateFormatEvaluator.java | 33 +++- .../date/DateParseConstantEvaluator.java | 9 +- .../scalar/date/DateParseEvaluator.java | 18 ++- .../scalar/date/DateTruncEvaluator.java | 27 +++- .../function/scalar/date/NowEvaluator.java | 14 +- .../scalar/ip/CIDRMatchEvaluator.java | 33 +++- .../scalar/math/AbsDoubleEvaluator.java | 24 ++- .../function/scalar/math/AbsIntEvaluator.java | 25 ++- .../scalar/math/AbsLongEvaluator.java | 25 ++- .../function/scalar/math/AcosEvaluator.java | 10 +- .../function/scalar/math/AsinEvaluator.java | 10 +- .../function/scalar/math/Atan2Evaluator.java | 35 ++++- .../function/scalar/math/AtanEvaluator.java | 25 ++- .../scalar/math/CastIntToDoubleEvaluator.java | 25 ++- .../scalar/math/CastIntToLongEvaluator.java | 25 ++- .../math/CastIntToUnsignedLongEvaluator.java | 24 ++- .../math/CastLongToDoubleEvaluator.java | 24 ++- .../math/CastLongToUnsignedLongEvaluator.java | 24 ++- 
.../CastUnsignedLongToDoubleEvaluator.java | 24 ++- .../scalar/math/CeilDoubleEvaluator.java | 25 ++- .../function/scalar/math/CosEvaluator.java | 25 ++- .../function/scalar/math/CoshEvaluator.java | 10 +- .../scalar/math/FloorDoubleEvaluator.java | 25 ++- .../scalar/math/IsFiniteEvaluator.java | 25 ++- .../scalar/math/IsInfiniteEvaluator.java | 25 ++- .../function/scalar/math/IsNaNEvaluator.java | 25 ++- .../scalar/math/Log10DoubleEvaluator.java | 10 +- .../scalar/math/Log10IntEvaluator.java | 10 +- .../scalar/math/Log10LongEvaluator.java | 10 +- .../math/Log10UnsignedLongEvaluator.java | 10 +- .../function/scalar/math/PowEvaluator.java | 19 ++- .../function/scalar/math/PowIntEvaluator.java | 146 ++++++++++++++++++ .../scalar/math/PowLongEvaluator.java | 146 ++++++++++++++++++ .../scalar/math/RoundDoubleEvaluator.java | 33 +++- .../math/RoundDoubleNoDecimalsEvaluator.java | 24 ++- .../scalar/math/RoundIntEvaluator.java | 33 +++- .../scalar/math/RoundLongEvaluator.java | 33 +++- .../math/RoundUnsignedLongEvaluator.java | 33 +++- .../function/scalar/math/SinEvaluator.java | 25 ++- .../function/scalar/math/SinhEvaluator.java | 10 +- .../scalar/math/SqrtDoubleEvaluator.java | 10 +- .../scalar/math/SqrtIntEvaluator.java | 10 +- .../scalar/math/SqrtLongEvaluator.java | 10 +- .../math/SqrtUnsignedLongEvaluator.java | 24 ++- .../function/scalar/math/TanEvaluator.java | 25 ++- .../function/scalar/math/TanhEvaluator.java | 25 ++- .../scalar/string/ConcatEvaluator.java | 26 +++- .../scalar/string/EndsWithEvaluator.java | 33 +++- .../scalar/string/LTrimEvaluator.java | 25 ++- .../function/scalar/string/LeftEvaluator.java | 33 +++- .../scalar/string/LengthEvaluator.java | 25 ++- .../scalar/string/RTrimEvaluator.java | 25 ++- .../string/ReplaceConstantEvaluator.java | 19 ++- .../scalar/string/ReplaceEvaluator.java | 28 +++- .../scalar/string/RightEvaluator.java | 33 +++- .../string/SplitSingleByteEvaluator.java | 24 ++- .../scalar/string/SplitVariableEvaluator.java | 33 +++- 
.../scalar/string/StartsWithEvaluator.java | 33 +++- .../scalar/string/SubstringEvaluator.java | 42 ++++- .../string/SubstringNoLengthEvaluator.java | 33 +++- .../function/scalar/string/TrimEvaluator.java | 25 ++- .../arithmetic/AddDatetimesEvaluator.java | 10 +- .../arithmetic/AddDoublesEvaluator.java | 33 +++- .../operator/arithmetic/AddIntsEvaluator.java | 19 ++- .../arithmetic/AddLongsEvaluator.java | 19 ++- .../arithmetic/AddUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/DivDoublesEvaluator.java | 33 +++- .../operator/arithmetic/DivIntsEvaluator.java | 19 ++- .../arithmetic/DivLongsEvaluator.java | 19 ++- .../arithmetic/DivUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/ModDoublesEvaluator.java | 33 +++- .../operator/arithmetic/ModIntsEvaluator.java | 19 ++- .../arithmetic/ModLongsEvaluator.java | 19 ++- .../arithmetic/ModUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/MulDoublesEvaluator.java | 33 +++- .../operator/arithmetic/MulIntsEvaluator.java | 19 ++- .../arithmetic/MulLongsEvaluator.java | 19 ++- .../arithmetic/MulUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/NegDoublesEvaluator.java | 25 ++- .../operator/arithmetic/NegIntsEvaluator.java | 10 +- .../arithmetic/NegLongsEvaluator.java | 10 +- .../arithmetic/SubDatetimesEvaluator.java | 10 +- .../arithmetic/SubDoublesEvaluator.java | 33 +++- .../operator/arithmetic/SubIntsEvaluator.java | 19 ++- .../arithmetic/SubLongsEvaluator.java | 19 ++- .../arithmetic/SubUnsignedLongsEvaluator.java | 19 ++- .../xpack/esql/evaluator/EvalMapper.java | 1 + .../operator/comparison/ComparisonMapper.java | 52 +++---- .../predicate/operator/regex/RegexMapper.java | 3 +- .../function/scalar/conditional/Greatest.java | 10 +- .../function/scalar/conditional/Least.java | 10 +- .../function/scalar/date/DateExtract.java | 2 +- .../function/scalar/date/DateFormat.java | 5 +- .../function/scalar/date/DateTrunc.java | 10 +- .../expression/function/scalar/date/Now.java | 2 +- .../function/scalar/ip/CIDRMatch.java | 
1 + .../expression/function/scalar/math/Abs.java | 6 +- .../math/AbstractTrigonometricFunction.java | 2 +- .../expression/function/scalar/math/Atan.java | 2 +- .../function/scalar/math/Atan2.java | 6 +- .../function/scalar/math/AutoBucket.java | 6 +- .../expression/function/scalar/math/Cast.java | 15 +- .../expression/function/scalar/math/Ceil.java | 2 +- .../expression/function/scalar/math/Cos.java | 2 +- .../function/scalar/math/Floor.java | 2 +- .../function/scalar/math/IsFinite.java | 2 +- .../function/scalar/math/IsInfinite.java | 2 +- .../function/scalar/math/IsNaN.java | 2 +- .../expression/function/scalar/math/Pow.java | 4 +- .../function/scalar/math/Round.java | 19 ++- .../expression/function/scalar/math/Sin.java | 2 +- .../expression/function/scalar/math/Sqrt.java | 2 +- .../expression/function/scalar/math/Tan.java | 2 +- .../expression/function/scalar/math/Tanh.java | 2 +- .../function/scalar/string/Concat.java | 2 +- .../function/scalar/string/EndsWith.java | 2 +- .../function/scalar/string/LTrim.java | 2 +- .../function/scalar/string/Left.java | 1 + .../function/scalar/string/Length.java | 2 +- .../function/scalar/string/RTrim.java | 2 +- .../function/scalar/string/Right.java | 1 + .../function/scalar/string/Split.java | 4 +- .../function/scalar/string/StartsWith.java | 2 +- .../function/scalar/string/Substring.java | 4 +- .../function/scalar/string/Trim.java | 2 +- .../predicate/operator/arithmetic/Add.java | 2 +- .../predicate/operator/arithmetic/Div.java | 2 +- .../arithmetic/EsqlArithmeticOperation.java | 4 +- .../predicate/operator/arithmetic/Mod.java | 2 +- .../predicate/operator/arithmetic/Mul.java | 2 +- .../predicate/operator/arithmetic/Neg.java | 2 +- .../predicate/operator/arithmetic/Sub.java | 2 +- .../xpack/esql/io/stream/PlanStreamInput.java | 44 +----- .../esql/io/stream/PlanStreamOutput.java | 6 +- .../esql/querydsl/query/SingleValueQuery.java | 129 ++++++++++++---- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- 
.../function/AbstractFunctionTestCase.java | 9 ++ .../function/scalar/math/RoundTests.java | 3 +- .../operator/arithmetic/AddTests.java | 3 +- .../operator/arithmetic/SubTests.java | 3 +- .../LocalPhysicalPlanOptimizerTests.java | 19 ++- .../xpack/esql/planner/FilterTests.java | 71 +++++---- .../SingleValueQuerySerializationTests.java | 9 +- .../querydsl/query/SingleValueQueryTests.java | 68 ++++---- .../elasticsearch/xpack/ql/tree/Source.java | 1 + .../xpack/ql/util/SourceUtils.java | 93 +++++++++++ .../elasticsearch/xpack/ql/CsvSpecReader.java | 35 ++++- 201 files changed, 3626 insertions(+), 785 deletions(-) create mode 100644 docs/changelog/102417.yaml create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java create mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java diff --git a/docs/changelog/102417.yaml b/docs/changelog/102417.yaml new file mode 100644 index 0000000000000..09c1a4f49dbfd --- /dev/null +++ b/docs/changelog/102417.yaml @@ -0,0 +1,6 @@ +pr: 102417 +summary: "ESQL: emit warnings from single-value functions processing multi-values" +area: ES|QL +type: feature +issues: + - 98743 diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 5e48eb4ef8af8..6cb7755b91ce9 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -180,12 +180,20 @@ POST /mv/_bulk?refresh { "a": 1, "b": [2, 1] } { "index" : {} } { "a": 2, "b": 3 } +---- +[source,console] +---- POST /_query { "query": "FROM mv | EVAL b + 2, a + b | LIMIT 4" } ---- +// TEST[continued] +// TEST[warning:Line 1:16: evaluation of [b + 2] failed, treating result as null. Only first 20 failures recorded.] 
+// TEST[warning:Line 1:16: java.lang.IllegalArgumentException: single-value function encountered multi-value] +// TEST[warning:Line 1:23: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded.] +// TEST[warning:Line 1:23: java.lang.IllegalArgumentException: single-value function encountered multi-value] [source,console-result] ---- diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 7c3568986dccd..30769371f3608 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -187,6 +187,7 @@ static TransportVersion def(int id) { public static final TransportVersion INFERENCE_USAGE_ADDED = def(8_554_00_0); public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_555_00_0); public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); + public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index 1b44e0d274e32..f283e3b59bb63 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -86,12 +86,9 @@ private TypeSpec type() { builder.addJavadoc("This class is generated. 
Do not edit it."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(EXPRESSION_EVALUATOR); - builder.addType(factory()); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); - } + builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); processFunction.args.stream().forEach(a -> a.declareField(builder)); builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); @@ -108,10 +105,8 @@ private TypeSpec type() { private MethodSpec ctor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addParameter(SOURCE, "source"); - builder.addStatement("this.warnings = new Warnings(source)"); - } + builder.addParameter(SOURCE, "source"); + builder.addStatement("this.warnings = new Warnings(source)"); processFunction.args.stream().forEach(a -> a.implementCtor(builder)); builder.addParameter(DRIVER_CONTEXT, "driverContext"); @@ -217,8 +212,23 @@ private MethodSpec realEval(boolean blockStyle) { } private static void skipNull(MethodSpec.Builder builder, String value) { - builder.beginControlFlow("if ($N.isNull(p) || $N.getValueCount(p) != 1)", value, value); + builder.beginControlFlow("if ($N.isNull(p))", value); + { + builder.addStatement("result.appendNull()"); + builder.addStatement("continue position"); + } + builder.endControlFlow(); + builder.beginControlFlow("if ($N.getValueCount(p) != 1)", value); { + builder.beginControlFlow("if ($N.getValueCount(p) > 1)", value); + { + builder.addStatement( + // TODO: reflection on SingleValueQuery.MULTI_VALUE_WARNING? 
+ "warnings.registerException(new $T(\"single-value function encountered multi-value\"))", + IllegalArgumentException.class + ); + } + builder.endControlFlow(); builder.addStatement("result.appendNull()"); builder.addStatement("continue position"); } @@ -259,9 +269,7 @@ private TypeSpec factory() { builder.addSuperinterface(EXPRESSION_EVALUATOR_FACTORY); builder.addModifiers(Modifier.STATIC); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); - } + builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); processFunction.args.stream().forEach(a -> a.declareFactoryField(builder)); builder.addMethod(factoryCtor()); @@ -273,10 +281,8 @@ private TypeSpec factory() { private MethodSpec factoryCtor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addParameter(SOURCE, "source"); - builder.addStatement("this.source = source"); - } + builder.addParameter(SOURCE, "source"); + builder.addStatement("this.source = source"); processFunction.args.stream().forEach(a -> a.implementFactoryCtor(builder)); return builder.build(); @@ -289,9 +295,7 @@ private MethodSpec factoryGet() { builder.returns(implementation); List args = new ArrayList<>(); - if (processFunction.warnExceptions.isEmpty() == false) { - args.add("source"); - } + args.add("source"); for (ProcessFunctionArg arg : processFunction.args) { String invocation = arg.factoryInvocation(builder); if (invocation != null) { diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index 1a09160dae3cd..e0533c68afd18 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ 
b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -35,7 +35,6 @@ public class Types { static final ClassName VECTOR = ClassName.get(DATA_PACKAGE, "Vector"); static final ClassName BIG_ARRAYS = ClassName.get("org.elasticsearch.common.util", "BigArrays"); - static final ClassName BYTES_REF_ARRAY = ClassName.get("org.elasticsearch.common.util", "BytesRefArray"); static final ClassName BOOLEAN_BLOCK = ClassName.get(DATA_PACKAGE, "BooleanBlock"); static final ClassName BYTES_REF_BLOCK = ClassName.get(DATA_PACKAGE, "BytesRefBlock"); @@ -86,10 +85,6 @@ public class Types { static final ClassName LONG_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantLongVector"); static final ClassName DOUBLE_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantDoubleVector"); - static final ClassName INT_ARRAY_STATE = ClassName.get(AGGREGATION_PACKAGE, "IntArrayState"); - static final ClassName LONG_ARRAY_STATE = ClassName.get(AGGREGATION_PACKAGE, "LongArrayState"); - static final ClassName DOUBLE_ARRAY_STATE = ClassName.get(AGGREGATION_PACKAGE, "DoubleArrayState"); - static final ClassName AGGREGATOR_FUNCTION = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunction"); static final ClassName AGGREGATOR_FUNCTION_SUPPLIER = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunctionSupplier"); static final ClassName GROUPING_AGGREGATOR_FUNCTION = ClassName.get(AGGREGATION_PACKAGE, "GroupingAggregatorFunction"); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index af3a6804f2220..734f26fab547a 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -105,7 +105,7 @@ protected void shouldSkipTest(String testName) { 
protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - Map answer = runEsql(builder.query(testCase.query).build(), testCase.expectedWarnings); + Map answer = runEsql(builder.query(testCase.query).build(), testCase.expectedWarnings(false)); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); var metadata = answer.get("columns"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 7426f07042962..3d9f9aa6e1c27 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -62,9 +62,13 @@ avg(salary):double | always_false:boolean ; -in +in#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true +warning:Line 1:63: evaluation of [is_rehired in (still_hired, true)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:63: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:105: evaluation of [is_rehired != still_hired] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:105: java.lang.IllegalArgumentException: single-value function encountered multi-value emp_no:integer |is_rehired:boolean |still_hired:boolean 10021 |true |false 10029 |true |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 7a5a90fb398eb..e6486960c7e04 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -200,8 +200,14 @@ Chirstian. |Chirstian.Koblick|Chirstian.KoblickChirstian.|Chirstian Kyoichi. |Kyoichi.Maliniak |Kyoichi.MaliniakKyoichi. |Kyoichi ; -roundArrays +roundArrays#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2]), f = round([1.2, 4.6]), g = round([1.14], 1), h = round([1.14], [1, 2]); +warning:Line 1:88: evaluation of [round([1.2, 4.6])] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:88: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:133: evaluation of [round([1.14], [1, 2])] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:133: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:56: evaluation of [round(b)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:56: java.lang.IllegalArgumentException: single-value function encountered multi-value a:double | b:double | c:double | d: double | e:double | f:double | g:double | h:double 1.2 | [2.4, 7.9] | 1.0 | null | 1.0 | null | 1.1 | null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index bef977b8eea98..f56266f868d44 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -91,8 +91,10 @@ int:integer |dbl:double 520128 |520128 ; -lessThanMultivalue +lessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -103,8 +105,10 @@ emp_no:integer |salary_change:double 10030 |-0.4 ; -greaterThanMultivalue +greaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -115,16 +119,20 @@ emp_no:integer |salary_change:double 10079 |7.58 ; -equalToMultivalue +equalToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; +warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change:double 10001 |1.19 ; -equalToOrEqualToMultivalue +equalToOrEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; +warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change:double @@ -132,8 +140,10 @@ emp_no:integer |salary_change:double 10079 |7.58 ; -inMultivalue +inMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; +warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change:double @@ -141,8 +151,10 @@ emp_no:integer |salary_change:double 10079 |7.58 ; -notLessThanMultivalue +notLessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -153,8 +165,10 @@ emp_no:integer |salary_change:double 10079 | 7.58 ; -notGreaterThanMultivalue +notGreaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded.] 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -165,8 +179,10 @@ emp_no:integer |salary_change:double 10030 | -0.4 ; -notEqualToMultivalue +notEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change == 1.19)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change:double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 3e28c8bc2cb9b..887d931f4cd5c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,9 @@ // Integral types-specific tests -inLongAndInt +inLongAndInt#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; +warning:Line 1:24: evaluation of [avg_worked_seconds in (372957040, salary_change.long, 236703986)] failed, treating 
result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value emp_no:integer |avg_worked_seconds:long 10017 |236703986 @@ -246,8 +248,10 @@ d:double |d2i:integer |overflow:integer 123.4 |123 |null ; -lessThanMultivalue +lessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -258,8 +262,10 @@ emp_no:integer |salary_change.int:integer 10030 | 0 ; -greaterThanMultivalue +greaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -270,8 +276,10 @@ emp_no:integer |salary_change.int:integer 10086 |13 ; -equalToMultivalue +equalToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; +warning:Line 1:24: evaluation of [salary_change.int == 0] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -281,8 +289,10 @@ emp_no:integer |salary_change.int:integer 10093 | 0 ; -equalToOrEqualToMultivalue +equalToOrEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; +warning:Line 1:24: evaluation of [salary_change.int] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change.int:integer @@ -290,8 +300,10 @@ emp_no:integer |salary_change.int:integer 10044 |8 ; -inMultivalue +inMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; +warning:Line 1:24: evaluation of [salary_change.int in (1, 7)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change.int:integer @@ -299,8 +311,10 @@ emp_no:integer |salary_change.int:integer 10079 |7 ; -notLessThanMultivalue +notLessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change.int < 1)] failed, treating result as null. 
Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -311,8 +325,10 @@ emp_no:integer |salary_change.int:integer 10079 | 7 ; -notGreaterThanMultivalue +notGreaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change.int > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded.] 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index f17af749cc85f..02e9db6ededf1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -15,8 +15,10 @@ eth1 |epsilon |null eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -equals +equals#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; +warning:Line 1:38: evaluation of [ip0 == ip1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |alpha |127.0.0.1 |127.0.0.1 @@ -55,8 +57,10 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9 eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0]|[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -lessThen +lessThan#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | sort host, card | where ip0 < ip1 | keep card, host, ip0, ip1; +warning:Line 1:38: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 @@ -64,8 +68,10 @@ eth1 |beta |127.0.0.1 |128.0.0.1 lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:feb9 ; -notEquals +notEquals#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; +warning:Line 1:43: evaluation of [ip0 != ip1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:43: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |beta |127.0.0.1 |::1 @@ -112,9 +118,11 @@ ip0:ip |ip1:ip null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; -conditional +conditional#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true +warning:Line 1:27: evaluation of [ip0==ip1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:27: java.lang.IllegalArgumentException: single-value function encountered multi-value eq:ip |ip0:ip |ip1:ip 127.0.0.1 |127.0.0.1 |127.0.0.1 @@ -129,9 +137,13 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0]|[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -in +in#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true +warning:Line 1:27: evaluation of [ip0==ip1] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:27: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:55: evaluation of [eq in (ip0, ip1)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:55: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip |eq:ip eth0 |alpha |127.0.0.1 |127.0.0.1 |127.0.0.1 @@ -144,16 +156,20 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9 eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1 ; -cidrMatchSimple +cidrMatchSimple#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; +warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 ; -cidrMatchNullField +cidrMatchNullField#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [cidr_match(ip0, \"127.0.0.2/32\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1 @@ -161,27 +177,33 @@ eth1 |epsilon |null eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -cdirMatchMultipleArgs +cidrMatchMultipleArgs#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip1, "127.0.0.2/32", "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\", \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; -cidrMatchFunctionArg +cidrMatchFunctionArg#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [cidr_match(ip1, concat(\"127.0.0.2\", \"/32\"), \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; -cidrMatchFieldArg +cidrMatchFieldArg#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:44: evaluation of [cidr_match(ip1, cidr, \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:44: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 @@ -215,15 +237,19 @@ str1:keyword |str2:keyword |ip1:ip |ip2:ip pushDownIP from hosts | where ip1 == to_ip("::1") | keep card, host, ip0, ip1; ignoreOrder:true +warning:#[Emulated:Line 1:20: evaluation of [ip1 == to_ip(\"::1\")] failed, treating result as null. Only first 20 failures recorded.] +warning:#[Emulated:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value] card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |alpha |::1 |::1 eth0 |beta |127.0.0.1 |::1 ; -pushDownIPWithIn +pushDownIPWithIn#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [ip1 in (to_ip(\"::1\"), to_ip(\"127.0.0.1\"))] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |alpha |127.0.0.1 |127.0.0.1 @@ -231,9 +257,11 @@ eth1 |alpha |::1 |::1 eth0 |beta |127.0.0.1 |::1 ; -pushDownIPWithComparision +pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [ip1 > to_ip(\"127.0.0.1\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |ip1:ip eth1 |127.0.0.2 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 2d1db44eea7be..1f78a63c8c4d8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -299,9 +299,11 @@ emp_no:integer | name:keyword ; // Note: no matches in MV returned -in +in#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true +warning:Line 1:24: evaluation of [job_positions in (\"Internship\", first_name)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value emp_no:integer |job_positions:keyword 10048 |Internship @@ -461,8 +463,10 @@ emp_no:integer |positions:keyword 10005 |null |null ; -lessThanMultivalue +lessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -470,8 +474,10 @@ emp_no:integer |job_positions:keyword 10068 |Architect ; -greaterThanMultivalue +greaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -483,16 +489,20 @@ emp_no:integer |job_positions:keyword 10021 |Support Engineer ; -equalToMultivalue +equalToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword 10025 |Accountant ; -equalToOrEqualToMultivalue +equalToOrEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -500,8 +510,10 @@ emp_no:integer |job_positions:keyword 10025 |Accountant ; -inMultivalue +inMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions in (\"Accountant\", \"Tech Lead\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -509,8 +521,10 @@ emp_no:integer |job_positions:keyword 10025 |Accountant ; -notLessThanMultivalue +notLessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [not(job_positions < \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -522,8 +536,10 @@ emp_no:integer |job_positions:keyword 10021 |Support Engineer ; -notGreaterThanMultivalue +notGreaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [not(job_positions > \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded.] 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -531,8 +547,10 @@ emp_no:integer |job_positions:keyword 10068 |Architect ; -notEqualToMultivalue +notEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [not(job_positions == \"Accountant\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -662,9 +680,11 @@ beta | Kubernetes cluster | beta k8s server beta | Kubernetes cluster | [beta k8s server, beta k8s server2] ; -lengthOfText +lengthOfText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true +warning:Line 1:73: evaluation of [length(description)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:73: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:integer | l2:integer null | 19 @@ -672,9 +692,11 @@ null | 19 17 | null ; -startsWithText +startsWithText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true +warning:Line 1:84: evaluation of [starts_with(description, host)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:84: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:boolean | l2:boolean null | true @@ -682,9 +704,11 @@ false | null false | null ; -substringOfText +substringOfText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true +warning:Line 1:82: evaluation of [substring(description, 0, 5)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:82: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:keyword | l2:keyword null | epsil @@ -692,8 +716,10 @@ Gatew | null Gatew | null ; -concatOfText -from hosts | where host == "epsilon" | eval l1 = concat(host,"/", host_group), l2 = concat(host_group,"/", description) | sort l1 | keep l1, l2; +concatOfText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] +from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; +warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:86: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:keyword | l2:keyword epsilon/Gateway instances | null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 2238b0c086d9e..523a0ef7c9eed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -45,8 +45,10 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; 2017-11-10T20:26:21.000Z|17067060651018256448|1722789377000665830 |67 |OK ; -filterPushDownGT +filterPushDownGT#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; +warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul | div:ul |id:i 74330435873664882 |74 |82 @@ -63,8 +65,12 @@ from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval 2703254959364209157|2703 |18 ; -filterPushDownRange +filterPushDownRange#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; +warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:#[Emulated:Line 1:67: evaluation of [bytes_in <= to_ul(316080452389500167)] failed, treating result as null. Only first 20 failures recorded.] +warning:#[Emulated:Line 1:67: java.lang.IllegalArgumentException: single-value function encountered multi-value] bytes_in:ul | div:ul |id:i 74330435873664882 |74 |82 @@ -73,9 +79,11 @@ from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to 316080452389500167 |316 |25 ; -filterPushDownIn +filterPushDownIn#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; +warning:Line 1:22: evaluation of [bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241))] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul |id:i 74330435873664882 |82 @@ -83,15 +91,19 @@ from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(15455196215089 195161570976258241 |88 ; -filterOnFieldsEquality +filterOnFieldsEquality#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where bytes_in == bytes_out; +warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value @timestamp:date | bytes_in:ul | bytes_out:ul | id:i | status:k 2017-11-10T21:12:17.000Z|16002960716282089759|16002960716282089759|34 |OK ; -filterOnFieldsInequality +filterOnFieldsInequality#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; +warning:Line 1:32: evaluation of [bytes_in < bytes_out] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:32: java.lang.IllegalArgumentException: single-value function encountered multi-value @timestamp:date | bytes_in:ul | bytes_out:ul | id:i | status:k | b_in:ul | b_out:ul 2017-11-10T21:15:54.000Z|4348801185987554667 |12749081495402663265|1 |OK |4348 |12749 @@ -117,8 +129,10 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des 1 |18317075104972913640 ; -case +case#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); +warning:Line 1:27: evaluation of [bytes_in == to_ul(154551962150890564)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:27: java.lang.IllegalArgumentException: single-value function encountered multi-value @timestamp:date | bytes_in:ul | bytes_out:ul | id:i | status:k 2017-11-10T20:21:58.000Z|154551962150890564|9382204513185396493|63 |OK @@ -138,17 +152,21 @@ FROM ul_logs 2017-11-10T20:34:43.000Z | 17764691215469285192 | 1.75E19 ; -toDegrees +toDegrees#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; +warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul | deg:double 16002960716282089759 | 9.169021087566165E20 ; -toRadians +toRadians#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; +warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul | rad:double 16002960716282089759 | 2.79304354566432608E17 diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java index b5b05d6d395fa..ef26fb4d000dc 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsBoolsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsBoolsEvaluator get(DriverContext context) { - return new EqualsBoolsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsBoolsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java index b4a0f127c8fa1..d5b2e84384a03 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsDoublesEvaluator get(DriverContext context) { - return new EqualsDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java index 8e491e14c6dc3..c2c9c7ce2b19c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsIntsEvaluator get(DriverContext context) { - return new EqualsIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java index 0fe04c80a66f1..8dc15ba6d2fec 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, 
EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsKeywordsEvaluator get(DriverContext context) { - return new EqualsKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java index 9e656111ee074..870d7c546010f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsLongsEvaluator get(DriverContext context) { - return new EqualsLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index 64ab3a28df39c..051df8053417f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, 
+ public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanDoublesEvaluator get(DriverContext context) { - return new GreaterThanDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java index 7795e9b5f1b4a..c6de582ef2909 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanIntsEvaluator get(DriverContext context) { - return new GreaterThanIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index 21ae9b1464d2a..cf243b68e473c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanKeywordsEvaluator get(DriverContext context) { - return new GreaterThanKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java index b2b559c715126..5f1a679c76a31 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanLongsEvaluator get(DriverContext context) { - return new GreaterThanLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index b73c6e359afd2..c36031c321422 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualDoublesEvaluator get(DriverContext context) { - return new GreaterThanOrEqualDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index 2a77ee8f068e2..2b64cfcf9ea49 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory 
lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualIntsEvaluator get(DriverContext context) { - return new GreaterThanOrEqualIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index 6909a3b761dd3..b8b2c9b6d4459 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualKeywordsEvaluator get(DriverContext context) { - return new GreaterThanOrEqualKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index 71a68b0bb95e6..907a29c8c904d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualLongsEvaluator get(DriverContext context) { - return new GreaterThanOrEqualLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java index f4990fe06f6cb..c3cf8293071e3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanDoublesEvaluator get(DriverContext context) { - return new LessThanDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java index db623747a5e61..a66ac0e889090 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanIntsEvaluator get(DriverContext context) { - return new LessThanIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java index be658c3da46ec..a0951d9a09382 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory 
lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanKeywordsEvaluator get(DriverContext context) { - return new LessThanKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java index 444c715c753cd..f0e7ac134410b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanLongsEvaluator get(DriverContext context) { - return new LessThanLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index bffdf4a80649c..cf12098962599 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualDoublesEvaluator get(DriverContext context) { - return new LessThanOrEqualDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index dd47aab76f21c..ffa8ab38bc2eb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + 
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualIntsEvaluator get(DriverContext context) { - return new LessThanOrEqualIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index e7a37b3f0fc41..2e7aafeb2d805 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualKeywordsEvaluator get(DriverContext context) { - return new LessThanOrEqualKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index fec54d164ac3b..9c211610da814 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory 
lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualLongsEvaluator get(DriverContext context) { - return new LessThanOrEqualLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index a8a8882bf54a4..7d2067fe6bdbe 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsBoolsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsBoolsEvaluator get(DriverContext context) { - return new NotEqualsBoolsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsBoolsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index cf5d7a5717600..174d3df53853b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + 
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsDoublesEvaluator get(DriverContext context) { - return new NotEqualsDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java index 128118d957222..03abc111d820e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsIntsEvaluator get(DriverContext context) { - return new NotEqualsIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index c2d12fe5840ab..919aeb4099b1f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsKeywordsEvaluator get(DriverContext context) { - return new NotEqualsKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java index 57e40c2857449..4ec694f918d97 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsLongsEvaluator get(DriverContext context) { - return new NotEqualsLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index de3f57d54d8e4..822d380386ee9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Not}. * This class is generated. Do not edit it. 
*/ public final class NotEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public NotEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public NotEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, BooleanBlock vBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public NotEvaluator get(DriverContext context) { - return new NotEvaluator(v.get(context), context); + return new NotEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java index 
83860fc328543..bdb8bfd0f613a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -17,20 +18,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RegexMatch}. * This class is generated. Do not edit it. */ public final class RegexMatchEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator input; private final CharacterRunAutomaton pattern; private final DriverContext driverContext; - public RegexMatchEvaluator(EvalOperator.ExpressionEvaluator input, CharacterRunAutomaton pattern, - DriverContext driverContext) { + public RegexMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator input, + CharacterRunAutomaton pattern, DriverContext driverContext) { + this.warnings = new Warnings(source); this.input = input; this.pattern = pattern; this.driverContext = driverContext; @@ -51,7 +57,14 @@ public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (inputBlock.isNull(p) || inputBlock.getValueCount(p) != 1) { + if 
(inputBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (inputBlock.getValueCount(p) != 1) { + if (inputBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -82,18 +95,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory input; private final CharacterRunAutomaton pattern; - public Factory(EvalOperator.ExpressionEvaluator.Factory input, CharacterRunAutomaton pattern) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory input, + CharacterRunAutomaton pattern) { + this.source = source; this.input = input; this.pattern = pattern; } @Override public RegexMatchEvaluator get(DriverContext context) { - return new RegexMatchEvaluator(input.get(context), pattern, context); + return new RegexMatchEvaluator(source, input.get(context), pattern, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index e335a2cc50add..75558171ab58c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. */ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] 
values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestBooleanEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestBooleanEvaluator(values, context); + return new GreatestBooleanEvaluator(source, values, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index 0919b6c624572..e70d147ec19b0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -16,18 +17,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -59,7 +65,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { } position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -104,16 +117,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestBytesRefEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestBytesRefEvaluator(values, context); + return new GreatestBytesRefEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index acabb839e0543..4a5d49cb5853b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestDoubleEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestDoubleEvaluator(values, context); + return new GreatestDoubleEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index e2fc35c829b5f..6c675c3168523 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestIntEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestIntEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestIntEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestIntEvaluator(values, context); + return new GreatestIntEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 8f10c02c53c00..3f4f0c748db3f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestLongEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestLongEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestLongEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestLongEvaluator(values, context); + return new GreatestLongEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index ce337ae405cba..70d4345fe197c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastBooleanEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastBooleanEvaluator(values, context); + return new LeastBooleanEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index 621d21e13f691..642ca36574cb6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -16,18 +17,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -59,7 +65,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { } position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -104,16 +117,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastBytesRefEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastBytesRefEvaluator(values, context); + return new LeastBytesRefEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index 42255e56c6527..41b0ad4d4d085 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastDoubleEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastDoubleEvaluator(values, context); + return new LeastDoubleEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index ca95f0096166e..c2c80db6ca0bb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastIntEvaluator(EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + public LeastIntEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -53,7 +60,14 @@ public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -94,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastIntEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastIntEvaluator(values, context); + return new LeastIntEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index 263972b414dd4..cd8ab3a0cd06f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastLongEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastLongEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastLongEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastLongEvaluator(values, context); + return new LeastLongEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java index f4109947c7406..8b1804cacfc21 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.time.ZoneId; @@ -15,12 +16,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. * This class is generated. Do not edit it. 
*/ public final class DateExtractConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator value; private final ChronoField chronoField; @@ -29,8 +34,9 @@ public final class DateExtractConstantEvaluator implements EvalOperator.Expressi private final DriverContext driverContext; - public DateExtractConstantEvaluator(EvalOperator.ExpressionEvaluator value, + public DateExtractConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator value, ChronoField chronoField, ZoneId zone, DriverContext driverContext) { + this.warnings = new Warnings(source); this.value = value; this.chronoField = chronoField; this.zone = zone; @@ -51,7 +57,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock valueBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p) || valueBlock.getValueCount(p) != 1) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -81,14 +94,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory value; private final ChronoField chronoField; private final ZoneId zone; - public Factory(EvalOperator.ExpressionEvaluator.Factory value, ChronoField chronoField, - ZoneId zone) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, + ChronoField chronoField, ZoneId zone) { + this.source = source; this.value = value; this.chronoField = chronoField; this.zone = zone; @@ -96,7 +112,7 @@ public 
Factory(EvalOperator.ExpressionEvaluator.Factory value, ChronoField chron @Override public DateExtractConstantEvaluator get(DriverContext context) { - return new DateExtractConstantEvaluator(value.get(context), chronoField, zone, context); + return new DateExtractConstantEvaluator(source, value.get(context), chronoField, zone, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java index 37af410e1d49d..65af16e2a9f5b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java @@ -66,11 +66,25 @@ public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chr try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef chronoFieldScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p) || valueBlock.getValueCount(p) != 1) { + if (valueBlock.isNull(p)) { result.appendNull(); continue position; } - if (chronoFieldBlock.isNull(p) || chronoFieldBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (chronoFieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (chronoFieldBlock.getValueCount(p) != 1) { + if (chronoFieldBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } 
result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java index 1ef4b15860dde..38cc3e2809f0a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.common.time.DateFormatter; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. * This class is generated. Do not edit it. 
*/ public final class DateFormatConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DateFormatter formatter; private final DriverContext driverContext; - public DateFormatConstantEvaluator(EvalOperator.ExpressionEvaluator val, DateFormatter formatter, - DriverContext driverContext) { + public DateFormatConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DateFormatter formatter, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.driverContext = driverContext; @@ -49,7 +55,14 @@ public Block eval(Page page) { public BytesRefBlock eval(int positionCount, LongBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -79,18 +92,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final DateFormatter formatter; - public Factory(EvalOperator.ExpressionEvaluator.Factory val, DateFormatter formatter) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, + DateFormatter formatter) { + this.source = source; this.val = val; this.formatter = formatter; } @Override public DateFormatConstantEvaluator get(DriverContext context) { - return new DateFormatConstantEvaluator(val.get(context), formatter, context); + return new 
DateFormatConstantEvaluator(source, val.get(context), formatter, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java index 5f8077f908b39..d517c16cb4076 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Locale; @@ -17,12 +18,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. * This class is generated. Do not edit it. 
*/ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator formatter; @@ -31,8 +36,9 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat private final DriverContext driverContext; - public DateFormatEvaluator(EvalOperator.ExpressionEvaluator val, + public DateFormatEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator formatter, Locale locale, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.locale = locale; @@ -60,11 +66,25 @@ public BytesRefBlock eval(int positionCount, LongBlock valBlock, BytesRefBlock f try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef formatterScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (formatterBlock.isNull(p) || formatterBlock.getValueCount(p) != 1) { + if (formatterBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (formatterBlock.getValueCount(p) != 1) { + if (formatterBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -96,14 +116,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final 
EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory formatter; private final Locale locale; - public Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory formatter, Locale locale) { + this.source = source; this.val = val; this.formatter = formatter; this.locale = locale; @@ -111,7 +134,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory val, @Override public DateFormatEvaluator get(DriverContext context) { - return new DateFormatEvaluator(val.get(context), formatter.get(context), locale, context); + return new DateFormatEvaluator(source, val.get(context), formatter.get(context), locale, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 84e141dcdf448..3a6b44d82a011 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -56,7 +56,14 @@ public LongBlock eval(int positionCount, BytesRefBlock valBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered 
multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index 233d2f45c93fa..2da9310b0f53a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -66,11 +66,25 @@ public LongBlock eval(int positionCount, BytesRefBlock valBlock, BytesRefBlock f BytesRef valScratch = new BytesRef(); BytesRef formatterScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { result.appendNull(); continue position; } - if (formatterBlock.isNull(p) || formatterBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (formatterBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (formatterBlock.getValueCount(p) != 1) { + if (formatterBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java index ff31d753427d4..27a15ca19bec9 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.common.Rounding; @@ -14,20 +15,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. * This class is generated. Do not edit it. */ public final class DateTruncEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final Rounding.Prepared rounding; private final DriverContext driverContext; - public DateTruncEvaluator(EvalOperator.ExpressionEvaluator fieldVal, Rounding.Prepared rounding, - DriverContext driverContext) { + public DateTruncEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + Rounding.Prepared rounding, DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.rounding = rounding; this.driverContext = driverContext; @@ -47,7 +53,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock fieldValBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue 
position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -77,18 +90,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory fieldVal; private final Rounding.Prepared rounding; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal, Rounding.Prepared rounding) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal, + Rounding.Prepared rounding) { + this.source = source; this.fieldVal = fieldVal; this.rounding = rounding; } @Override public DateTruncEvaluator get(DriverContext context) { - return new DateTruncEvaluator(fieldVal.get(context), rounding, context); + return new DateTruncEvaluator(source, fieldVal.get(context), rounding, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index d4c04b724377e..45465468f7c91 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -11,17 +11,22 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Now}. * This class is generated. 
Do not edit it. */ public final class NowEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final long now; private final DriverContext driverContext; - public NowEvaluator(long now, DriverContext driverContext) { + public NowEvaluator(Source source, long now, DriverContext driverContext) { + this.warnings = new Warnings(source); this.now = now; this.driverContext = driverContext; } @@ -50,15 +55,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final long now; - public Factory(long now) { + public Factory(Source source, long now) { + this.source = source; this.now = now; } @Override public NowEvaluator get(DriverContext context) { - return new NowEvaluator(now, context); + return new NowEvaluator(source, now, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index c3a347433ff9f..4ac2fa7d2738e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.ip; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -18,20 +19,25 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link CIDRMatch}. * This class is generated. Do not edit it. */ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator ip; private final EvalOperator.ExpressionEvaluator[] cidrs; private final DriverContext driverContext; - public CIDRMatchEvaluator(EvalOperator.ExpressionEvaluator ip, + public CIDRMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator ip, EvalOperator.ExpressionEvaluator[] cidrs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.ip = ip; this.cidrs = cidrs; this.driverContext = driverContext; @@ -70,12 +76,26 @@ public BooleanBlock eval(int positionCount, BytesRefBlock ipBlock, BytesRefBlock cidrsScratch[i] = new BytesRef(); } position: for (int p = 0; p < positionCount; p++) { - if (ipBlock.isNull(p) || ipBlock.getValueCount(p) != 1) { + if (ipBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (ipBlock.getValueCount(p) != 1) { + if (ipBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } for (int i = 0; i < cidrsBlocks.length; i++) { - if (cidrsBlocks[i].isNull(p) || cidrsBlocks[i].getValueCount(p) != 1) { + if (cidrsBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (cidrsBlocks[i].getValueCount(p) != 1) { 
+ if (cidrsBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -122,12 +142,15 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory ip; private final EvalOperator.ExpressionEvaluator.Factory[] cidrs; - public Factory(EvalOperator.ExpressionEvaluator.Factory ip, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory ip, EvalOperator.ExpressionEvaluator.Factory[] cidrs) { + this.source = source; this.ip = ip; this.cidrs = cidrs; } @@ -135,7 +158,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory ip, @Override public CIDRMatchEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] cidrs = Arrays.stream(this.cidrs).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new CIDRMatchEvaluator(ip.get(context), cidrs, context); + return new CIDRMatchEvaluator(source, ip.get(context), cidrs, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index b7e061e5e684b..d7c793b99e57b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,18 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. * This class is generated. Do not edit it. */ public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final DriverContext driverContext; - public AbsDoubleEvaluator(EvalOperator.ExpressionEvaluator fieldVal, + public AbsDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; } @@ -43,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock fieldValBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -73,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final 
EvalOperator.ExpressionEvaluator.Factory fieldVal; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal) { + this.source = source; this.fieldVal = fieldVal; } @Override public AbsDoubleEvaluator get(DriverContext context) { - return new AbsDoubleEvaluator(fieldVal.get(context), context); + return new AbsDoubleEvaluator(source, fieldVal.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index 9894a8ebcdce3..9964a95fafe0c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. * This class is generated. Do not edit it. 
*/ public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final DriverContext driverContext; - public AbsIntEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { + public AbsIntEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock fieldValBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory fieldVal; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal) { + this.source = source; this.fieldVal = fieldVal; } @Override public AbsIntEvaluator get(DriverContext context) { - return new AbsIntEvaluator(fieldVal.get(context), context); + return new AbsIntEvaluator(source, fieldVal.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index ebbb754e28188..9457112aa9d81 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. * This class is generated. Do not edit it. 
*/ public final class AbsLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final DriverContext driverContext; - public AbsLongEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { + public AbsLongEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock fieldValBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory fieldVal; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal) { + this.source = source; this.fieldVal = fieldVal; } @Override public AbsLongEvaluator get(DriverContext context) { - return new AbsLongEvaluator(fieldVal.get(context), context); + return new AbsLongEvaluator(source, fieldVal.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index ce43cb0d88d09..1c86fe46e9b93 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index 2b8168cd2abc7..fc73f4c475676 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import 
java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index ac4d61502be33..b6d0a628c329c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan2}. 
* This class is generated. Do not edit it. */ public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator y; private final EvalOperator.ExpressionEvaluator x; private final DriverContext driverContext; - public Atan2Evaluator(EvalOperator.ExpressionEvaluator y, EvalOperator.ExpressionEvaluator x, - DriverContext driverContext) { + public Atan2Evaluator(Source source, EvalOperator.ExpressionEvaluator y, + EvalOperator.ExpressionEvaluator x, DriverContext driverContext) { + this.warnings = new Warnings(source); this.y = y; this.x = x; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock yBlock, DoubleBlock xBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (yBlock.isNull(p) || yBlock.getValueCount(p) != 1) { + if (yBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (yBlock.getValueCount(p) != 1) { + if (yBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (xBlock.isNull(p) || xBlock.getValueCount(p) != 1) { + if (xBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (xBlock.getValueCount(p) != 1) { + if (xBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory y; private final EvalOperator.ExpressionEvaluator.Factory x; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory y, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory y, EvalOperator.ExpressionEvaluator.Factory x) { + this.source = source; this.y = y; this.x = x; } @Override public Atan2Evaluator get(DriverContext context) { - return new Atan2Evaluator(y.get(context), x.get(context), context); + return new Atan2Evaluator(source, y.get(context), x.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index 2ce4dac48fbf5..b40a6cde6550e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan}. * This class is generated. Do not edit it. 
*/ public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public AtanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public AtanEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public AtanEvaluator get(DriverContext context) { - return new AtanEvaluator(val.get(context), context); + return new AtanEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index 5b09822354480..a13d11199c0fb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastIntToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public CastIntToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, IntBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastIntToDoubleEvaluator get(DriverContext context) { - return new CastIntToDoubleEvaluator(v.get(context), context); + return new CastIntToDoubleEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index 0b9f3a5cd2a51..cf91f080537e7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastIntToLongEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public CastIntToLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, IntBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastIntToLongEvaluator get(DriverContext context) { - return new CastIntToLongEvaluator(v.get(context), context); + return new CastIntToLongEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index ee228b79085b7..15b18a91ee241 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastIntToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastIntToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, + public CastIntToUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, IntBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastIntToUnsignedLongEvaluator get(DriverContext context) { - return new CastIntToUnsignedLongEvaluator(v.get(context), context); + return new CastIntToUnsignedLongEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java index 9a70690bf891d..1bb63cb66eec5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, + public CastLongToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastLongToDoubleEvaluator get(DriverContext context) { - return new CastLongToDoubleEvaluator(v.get(context), context); + return new CastLongToDoubleEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java index a258b2eeb7636..3ed067671183d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,18 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastLongToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastLongToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, + public CastLongToUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -43,7 +49,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -73,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastLongToUnsignedLongEvaluator get(DriverContext context) { - return new CastLongToUnsignedLongEvaluator(v.get(context), context); + return new CastLongToUnsignedLongEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java index f57d0f4dae34d..5135aab0dcc50 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastUnsignedLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, + public CastUnsignedLongToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastUnsignedLongToDoubleEvaluator get(DriverContext context) { - return new CastUnsignedLongToDoubleEvaluator(v.get(context), context); + return new CastUnsignedLongToDoubleEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index fb25d318f7336..500f108afbe39 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Ceil}. * This class is generated. Do not edit it. 
*/ public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public CeilDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public CeilDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public CeilDoubleEvaluator get(DriverContext context) { - return new CeilDoubleEvaluator(val.get(context), context); + return new CeilDoubleEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index 7fb5063875834..dd3961845c244 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cos}. * This class is generated. Do not edit it. 
*/ public final class CosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public CosEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public CosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public CosEvaluator get(DriverContext context) { - return new CosEvaluator(val.get(context), context); + return new CosEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java 
index ab862a62c6bfe..2f0bbaaacb40a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index 99ceca3521883..f8a10822a3c44 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Floor}. * This class is generated. Do not edit it. */ public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public FloorDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public FloorDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final 
EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public FloorDoubleEvaluator get(DriverContext context) { - return new FloorDoubleEvaluator(val.get(context), context); + return new FloorDoubleEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java index 6ad3ccb6cb287..85a5476b9510d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IsFinite}. * This class is generated. Do not edit it. 
*/ public final class IsFiniteEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public IsFiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public IsFiniteEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public IsFiniteEvaluator get(DriverContext context) { - return new IsFiniteEvaluator(val.get(context), context); + return new IsFiniteEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java index 00b260467046c..1813fd1ee056e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IsInfinite}. * This class is generated. Do not edit it. 
*/ public final class IsInfiniteEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public IsInfiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public IsInfiniteEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public IsInfiniteEvaluator get(DriverContext context) { - return new IsInfiniteEvaluator(val.get(context), context); + return new IsInfiniteEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java index d7639010d9533..b9dee40de583d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IsNaN}. * This class is generated. Do not edit it. 
*/ public final class IsNaNEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public IsNaNEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public IsNaNEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public IsNaNEvaluator get(DriverContext context) { - return new IsNaNEvaluator(val.get(context), context); + return new IsNaNEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java index 6a42dadae78ea..d402cf7a79e68 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java index 782e35e9a74ab..a1aa03af7d7f5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java @@ -5,6 +5,7 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, IntBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java index cfcf56a637f32..848baaea72b67 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - 
if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java index 1b092bcbfd8a6..01812d8b1d2c9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java index 775cee816be7b..33bf2b4bd0c25 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (baseBlock.isNull(p) || baseBlock.getValueCount(p) != 1) { + if (baseBlock.isNull(p)) { result.appendNull(); continue position; } - if (exponentBlock.isNull(p) || exponentBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (exponentBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (exponentBlock.getValueCount(p) != 1) { + if (exponentBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java new file mode 100644 index 0000000000000..1232e0dda7c0f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java @@ -0,0 +1,146 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. + * This class is generated. Do not edit it. 
+ */ +public final class PowIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator base; + + private final EvalOperator.ExpressionEvaluator exponent; + + private final DriverContext driverContext; + + public PowIntEvaluator(Source source, EvalOperator.ExpressionEvaluator base, + EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.base = base; + this.exponent = exponent; + this.driverContext = driverContext; + } + + @Override + public Block.Ref eval(Page page) { + try (Block.Ref baseRef = base.eval(page)) { + DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); + try (Block.Ref exponentRef = exponent.eval(page)) { + DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); + DoubleVector baseVector = baseBlock.asVector(); + if (baseVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + DoubleVector exponentVector = exponentBlock.asVector(); + if (exponentVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); + } + } + } + + public IntBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (baseBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (baseBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (exponentBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (exponentBlock.getValueCount(p) != 1) { + if 
(exponentBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(Pow.processInt(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(Pow.processInt(baseVector.getDouble(p), exponentVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(base, exponent); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory exponent; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory exponent) { + this.source = source; + this.base = base; + this.exponent = exponent; + } + + @Override + public PowIntEvaluator get(DriverContext context) { + return new PowIntEvaluator(source, base.get(context), exponent.get(context), context); + } + + @Override + public String toString() { + return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java new file mode 100644 index 0000000000000..bd2e5f5e10ec2 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java @@ -0,0 +1,146 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. + * This class is generated. Do not edit it. 
+ */ +public final class PowLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator base; + + private final EvalOperator.ExpressionEvaluator exponent; + + private final DriverContext driverContext; + + public PowLongEvaluator(Source source, EvalOperator.ExpressionEvaluator base, + EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.base = base; + this.exponent = exponent; + this.driverContext = driverContext; + } + + @Override + public Block.Ref eval(Page page) { + try (Block.Ref baseRef = base.eval(page)) { + DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); + try (Block.Ref exponentRef = exponent.eval(page)) { + DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); + DoubleVector baseVector = baseBlock.asVector(); + if (baseVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + DoubleVector exponentVector = exponentBlock.asVector(); + if (exponentVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); + } + } + } + + public LongBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (baseBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (baseBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (exponentBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (exponentBlock.getValueCount(p) != 1) { + if 
(exponentBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendLong(Pow.processLong(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public LongBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendLong(Pow.processLong(baseVector.getDouble(p), exponentVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(base, exponent); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory exponent; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory exponent) { + this.source = source; + this.base = base; + this.exponent = exponent; + } + + @Override + public PowLongEvaluator get(DriverContext context) { + return new PowLongEvaluator(source, base.get(context), exponent.get(context), context); + } + + @Override + public String toString() { + return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index a658e73a3b44f..3b85a32fc3081 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundDoubleEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock, LongBlock decimalsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundDoubleEvaluator get(DriverContext context) { - return new RoundDoubleEvaluator(val.get(context), decimals.get(context), context); + return new RoundDoubleEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index 316655de1d7b7..c36a1fe25b61f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,18 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public RoundDoubleNoDecimalsEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundDoubleNoDecimalsEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -43,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -73,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public RoundDoubleNoDecimalsEvaluator get(DriverContext context) { - return new RoundDoubleNoDecimalsEvaluator(val.get(context), context); + return new RoundDoubleNoDecimalsEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index 71ea5938afe48..f96f92e5d0b38 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundIntEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock valBlock, LongBlock decimalsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundIntEvaluator get(DriverContext context) { - return new RoundIntEvaluator(val.get(context), decimals.get(context), context); + return new RoundIntEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index eae45800fdee0..c8a2fdd384f40 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundLongEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundLongEvaluator get(DriverContext context) { - return new RoundLongEvaluator(val.get(context), decimals.get(context), context); + return new RoundLongEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index 5f8cb5370b213..5c94e386d4978 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundUnsignedLongEvaluator get(DriverContext context) { - return new RoundUnsignedLongEvaluator(val.get(context), decimals.get(context), context); + return new RoundUnsignedLongEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index fd2f0b1e3de64..a3c9e1481c19e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sin}. * This class is generated. Do not edit it. 
*/ public final class SinEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public SinEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public SinEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public SinEvaluator get(DriverContext context) { - return new SinEvaluator(val.get(context), context); + return new SinEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java 
index 342c1b86a873f..c6020d6bd86ea 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index 7be90cb5c87c0..516d6639fb115 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import 
java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index d7a24ebafec97..3719bc6bd7326 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, IntBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if 
(valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index 57055641877c9..a9620291ddd8b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index 8eddd0293ae86..6478f0639bb9a 100644 
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}. * This class is generated. Do not edit it. */ public final class SqrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public SqrtUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, + public SqrtUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new 
IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public SqrtUnsignedLongEvaluator get(DriverContext context) { - return new SqrtUnsignedLongEvaluator(val.get(context), context); + return new SqrtUnsignedLongEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 2ff4ccba94ae0..ed410d20d122b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tan}. * This class is generated. Do not edit it. 
*/ public final class TanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public TanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public TanEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public TanEvaluator get(DriverContext context) { - return new TanEvaluator(val.get(context), context); + return new TanEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java 
index 05cfc6446cdb6..94fa4fad18fd3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tanh}. * This class is generated. Do not edit it. */ public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public TanhEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public TanhEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new 
IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public TanhEvaluator get(DriverContext context) { - return new TanhEvaluator(val.get(context), context); + return new TanhEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index 2b3045d29c70f..99e87ce490eb1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -18,20 +19,25 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Concat}. * This class is generated. Do not edit it. 
*/ public final class ConcatEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final BreakingBytesRefBuilder scratch; private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public ConcatEvaluator(BreakingBytesRefBuilder scratch, EvalOperator.ExpressionEvaluator[] values, - DriverContext driverContext) { + public ConcatEvaluator(Source source, BreakingBytesRefBuilder scratch, + EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.scratch = scratch; this.values = values; this.driverContext = driverContext; @@ -64,7 +70,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { } position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -109,12 +122,15 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final Function scratch; private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(Function scratch, + public Factory(Source source, Function scratch, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.scratch = scratch; this.values = values; } @@ -122,7 +138,7 @@ public Factory(Function scratch, @Override public ConcatEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); 
- return new ConcatEvaluator(scratch.apply(context), values, context); + return new ConcatEvaluator(source, scratch.apply(context), values, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index b1cadf96b80cd..8d1d197aae9ad 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link EndsWith}. * This class is generated. Do not edit it. 
*/ public final class EndsWithEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator suffix; private final DriverContext driverContext; - public EndsWithEvaluator(EvalOperator.ExpressionEvaluator str, + public EndsWithEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator suffix, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.suffix = suffix; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBloc BytesRef strScratch = new BytesRef(); BytesRef suffixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (suffixBlock.isNull(p) || suffixBlock.getValueCount(p) != 1) { + if (suffixBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (suffixBlock.getValueCount(p) != 1) { + if (suffixBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -94,19 +114,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory suffix; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory suffix) { + this.source = source; this.str = str; this.suffix = suffix; } @Override public EndsWithEvaluator get(DriverContext context) { - return new EndsWithEvaluator(str.get(context), suffix.get(context), context); + return new EndsWithEvaluator(source, str.get(context), suffix.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index 034cf5ddc5727..0f68955507d50 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -14,17 +15,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LTrim}. * This class is generated. Do not edit it. 
*/ public final class LTrimEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public LTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public LTrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +89,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public LTrimEvaluator get(DriverContext context) { - return new LTrimEvaluator(val.get(context), context); + return new LTrimEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index b2cbbc8ed9cf6..13e7cbe9ece92 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -18,12 +19,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Left}. * This class is generated. Do not edit it. 
*/ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final BytesRef out; private final UnicodeUtil.UTF8CodePoint cp; @@ -34,9 +39,10 @@ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext driverContext; - public LeftEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, + public LeftEvaluator(Source source, BytesRef out, UnicodeUtil.UTF8CodePoint cp, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { + this.warnings = new Warnings(source); this.out = out; this.cp = cp; this.str = str; @@ -65,11 +71,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock le try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (lengthBlock.isNull(p) || lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -100,6 +120,8 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final Function out; private final Function cp; @@ -108,10 +130,11 @@ static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory length; - public Factory(Function out, + public Factory(Source source, Function out, Function cp, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory length) { + this.source = source; this.out = out; this.cp = cp; this.str = str; @@ -120,7 +143,7 @@ public Factory(Function out, @Override public LeftEvaluator get(DriverContext context) { - return new LeftEvaluator(out.apply(context), cp.apply(context), str.get(context), length.get(context), context); + return new LeftEvaluator(source, out.apply(context), cp.apply(context), str.get(context), length.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index 2896de06f656d..890b56e78ca13 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,17 +17,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Length}. * This class is generated. Do not edit it. 
*/ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public LengthEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public LengthEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -46,7 +53,14 @@ public IntBlock eval(int positionCount, BytesRefBlock valBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -77,15 +91,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public LengthEvaluator get(DriverContext context) { - return new LengthEvaluator(val.get(context), context); + return new LengthEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index a2d1d6bb34384..fdd1c2a23357f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -14,17 +15,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RTrim}. * This class is generated. Do not edit it. 
*/ public final class RTrimEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public RTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public RTrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +89,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public RTrimEvaluator get(DriverContext context) { - return new RTrimEvaluator(val.get(context), context); + return new RTrimEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index b3af24d2f6851..71f8724d17a80 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.regex.Pattern; @@ -65,11 +66,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo BytesRef strScratch = new BytesRef(); BytesRef newStrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { result.appendNull(); continue position; } - if (newStrBlock.isNull(p) || newStrBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (newStrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (newStrBlock.getValueCount(p) != 1) { + if (newStrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java index 89013fd3ca2f1..8d4deb878f117 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.regex.PatternSyntaxException; @@ -73,15 +74,36 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo BytesRef regexScratch = new BytesRef(); BytesRef newStrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { result.appendNull(); continue position; } - if (regexBlock.isNull(p) || regexBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (regexBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (regexBlock.getValueCount(p) != 1) { + if (regexBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (newStrBlock.isNull(p)) { result.appendNull(); continue position; } - if (newStrBlock.isNull(p) || newStrBlock.getValueCount(p) != 1) { + if (newStrBlock.getValueCount(p) != 1) { + if (newStrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index 1e3094ed8d5d3..96473a2deefd2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -18,12 +19,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Right}. * This class is generated. Do not edit it. 
*/ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final BytesRef out; private final UnicodeUtil.UTF8CodePoint cp; @@ -34,9 +39,10 @@ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext driverContext; - public RightEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, + public RightEvaluator(Source source, BytesRef out, UnicodeUtil.UTF8CodePoint cp, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { + this.warnings = new Warnings(source); this.out = out; this.cp = cp; this.str = str; @@ -65,11 +71,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock le try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (lengthBlock.isNull(p) || lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -100,6 +120,8 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final Function out; private final Function cp; @@ -108,10 +130,11 @@ static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory length; - public Factory(Function out, + public Factory(Source source, Function out, Function cp, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory length) { + this.source = source; this.out = out; this.cp = cp; this.str = str; @@ -120,7 +143,7 @@ public Factory(Function out, @Override public RightEvaluator get(DriverContext context) { - return new RightEvaluator(out.apply(context), cp.apply(context), str.get(context), length.get(context), context); + return new RightEvaluator(source, out.apply(context), cp.apply(context), str.get(context), length.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index bb5b3569934c0..7081f22606112 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -15,12 +16,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. * This class is generated. Do not edit it. 
*/ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final byte delim; @@ -29,8 +34,9 @@ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEv private final DriverContext driverContext; - public SplitSingleByteEvaluator(EvalOperator.ExpressionEvaluator str, byte delim, + public SplitSingleByteEvaluator(Source source, EvalOperator.ExpressionEvaluator str, byte delim, BytesRef scratch, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.delim = delim; this.scratch = scratch; @@ -52,7 +58,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -83,14 +96,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final byte delim; private final Function scratch; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, byte delim, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, byte delim, Function scratch) { + this.source = source; this.str = str; this.delim = delim; this.scratch = scratch; @@ -98,7 +114,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory str, byte delim, @Override public 
SplitSingleByteEvaluator get(DriverContext context) { - return new SplitSingleByteEvaluator(str.get(context), delim, scratch.apply(context), context); + return new SplitSingleByteEvaluator(source, str.get(context), delim, scratch.apply(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index d80d8d65c3606..82feca1b79053 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -15,12 +16,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. * This class is generated. Do not edit it. 
*/ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator delim; @@ -29,8 +34,9 @@ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEval private final DriverContext driverContext; - public SplitVariableEvaluator(EvalOperator.ExpressionEvaluator str, + public SplitVariableEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator delim, BytesRef scratch, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.delim = delim; this.scratch = scratch; @@ -59,11 +65,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo BytesRef strScratch = new BytesRef(); BytesRef delimScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (delimBlock.isNull(p) || delimBlock.getValueCount(p) != 1) { + if (delimBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (delimBlock.getValueCount(p) != 1) { + if (delimBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -96,14 +116,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory delim; private final 
Function scratch; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory delim, Function scratch) { + this.source = source; this.str = str; this.delim = delim; this.scratch = scratch; @@ -111,7 +134,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory str, @Override public SplitVariableEvaluator get(DriverContext context) { - return new SplitVariableEvaluator(str.get(context), delim.get(context), scratch.apply(context), context); + return new SplitVariableEvaluator(source, str.get(context), delim.get(context), scratch.apply(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index 564dd1b7760be..9eb1c488f52dd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StartsWith}. * This class is generated. Do not edit it. 
*/ public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator prefix; private final DriverContext driverContext; - public StartsWithEvaluator(EvalOperator.ExpressionEvaluator str, + public StartsWithEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator prefix, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.prefix = prefix; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBloc BytesRef strScratch = new BytesRef(); BytesRef prefixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (prefixBlock.isNull(p) || prefixBlock.getValueCount(p) != 1) { + if (prefixBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (prefixBlock.getValueCount(p) != 1) { + if (prefixBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -94,19 +114,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory prefix; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory prefix) { + this.source = source; this.str = str; this.prefix = prefix; } @Override public StartsWithEvaluator get(DriverContext context) { - return new StartsWithEvaluator(str.get(context), prefix.get(context), context); + return new StartsWithEvaluator(source, str.get(context), prefix.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index f0b4b0363ebc5..9da104137ba94 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,12 +17,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. * This class is generated. Do not edit it. 
*/ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator start; @@ -30,9 +35,10 @@ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluato private final DriverContext driverContext; - public SubstringEvaluator(EvalOperator.ExpressionEvaluator str, + public SubstringEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.start = start; this.length = length; @@ -67,15 +73,36 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock st try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startBlock.isNull(p)) { result.appendNull(); continue position; } - if (startBlock.isNull(p) || startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (lengthBlock.isNull(p) || lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lengthBlock.getValueCount(p) != 1) { + if 
(lengthBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -107,15 +134,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory start; private final EvalOperator.ExpressionEvaluator.Factory length; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory start, EvalOperator.ExpressionEvaluator.Factory length) { + this.source = source; this.str = str; this.start = start; this.length = length; @@ -123,7 +153,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory str, @Override public SubstringEvaluator get(DriverContext context) { - return new SubstringEvaluator(str.get(context), start.get(context), length.get(context), context); + return new SubstringEvaluator(source, str.get(context), start.get(context), length.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index a410df8bbdc69..08d12ac049837 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. * This class is generated. Do not edit it. */ public final class SubstringNoLengthEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator start; private final DriverContext driverContext; - public SubstringNoLengthEvaluator(EvalOperator.ExpressionEvaluator str, + public SubstringNoLengthEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator start, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.start = start; this.driverContext = driverContext; @@ -56,11 +62,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock st try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (startBlock.isNull(p) || startBlock.getValueCount(p) != 1) { + if 
(startBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -91,19 +111,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory start; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory start) { + this.source = source; this.str = str; this.start = start; } @Override public SubstringNoLengthEvaluator get(DriverContext context) { - return new SubstringNoLengthEvaluator(str.get(context), start.get(context), context); + return new SubstringNoLengthEvaluator(source, str.get(context), start.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index 38b42070e96a6..1ecb6b3bd578f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -14,17 +15,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Trim}. * This class is generated. Do not edit it. */ public final class TrimEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public TrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public TrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +89,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory 
val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public TrimEvaluator get(DriverContext context) { - return new TrimEvaluator(val.get(context), context); + return new TrimEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index f484a77c30ed2..04b433ecde34a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.time.DateTimeException; @@ -54,7 +55,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock datetimeBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (datetimeBlock.isNull(p) || datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index 1e9cf33ae39e2..071369c29f333 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. * This class is generated. Do not edit it. 
*/ public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public AddDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public AddDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public AddDoublesEvaluator get(DriverContext context) { - return new AddDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new AddDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java index e7a3b57479b99..bf9157540ea55 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java index d2e029ff276b8..51199df88fb9b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java index 54b7b8df88178..10b21fb5898e8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java index f906d83b19ce4..bb9f55f2b5b85 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. * This class is generated. Do not edit it. 
*/ public final class DivDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public DivDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public DivDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public DivDoublesEvaluator get(DriverContext context) { - return new DivDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new DivDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java index 53cfbd8540e33..de3fb03fe4405 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java index 31f62d3d729c5..9eb02cbd47614 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java index 104208de1e13f..50e3c933fec41 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java index 6d4f2d08b0b6e..8d441ffe10a48 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. * This class is generated. Do not edit it. 
*/ public final class ModDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public ModDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public ModDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public ModDoublesEvaluator get(DriverContext context) { - return new ModDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new ModDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java index 1f6979179627d..c2c44dba5207d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java index 3bc252c5cd059..58b3f055db6b2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java index a18a99c7e220f..5b79aa8653923 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index 4ab6801f66b92..1b9d10bff58e9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. * This class is generated. Do not edit it. 
*/ public final class MulDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public MulDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public MulDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public MulDoublesEvaluator get(DriverContext context) { - return new MulDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new MulDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java index 9926668c5e505..7501d0fc505a1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java index 8be74005e1940..383e55755917d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java index 4ba489dc65f06..95ecaee6b34ac 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index 330b3afa3df19..5915d4d476f19 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. * This class is generated. Do not edit it. 
*/ public final class NegDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public NegDoublesEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public NegDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public NegDoublesEvaluator get(DriverContext context) { - return new NegDoublesEvaluator(v.get(context), context); + return new NegDoublesEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java index 9691099b03924..1821406f061bd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock vBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java index 4d8ee14d4569b..49a0096665112 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java @@ -5,6 +5,7 @@ package 
org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index de81736c42abf..88d94573b7562 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.time.DateTimeException; @@ -54,7 +55,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock datetimeBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for 
(int p = 0; p < positionCount; p++) { - if (datetimeBlock.isNull(p) || datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 6609d6cfbb4ae..d479d0fe751c9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. * This class is generated. Do not edit it. 
*/ public final class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public SubDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public SubDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public SubDoublesEvaluator get(DriverContext context) { - return new SubDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new SubDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index 4013cdd240dd0..72bd7e4b6848a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index 7528750da15f8..88cb6bf287d8d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index 6c2a31db0a6f0..1ef9034d76f62 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 132df0d3a5afd..280ef898c3b90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -154,6 +154,7 @@ static class Nots extends ExpressionMapper { public ExpressionEvaluator.Factory map(Not not, Layout layout) { var expEval = toEvaluator(not.field(), layout); return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical.NotEvaluator( + not.source(), expEval.get(dvrCtx), dvrCtx ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java index d5a3e1cc6244c..36c19825fab85 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; @@ -16,11 +17,10 @@ import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import 
org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import java.util.function.BiFunction; - import static org.elasticsearch.xpack.esql.evaluator.EvalMapper.toEvaluator; public abstract class ComparisonMapper extends ExpressionMapper { @@ -74,18 +74,18 @@ public abstract class ComparisonMapper extends Expre ) { }; - private final BiFunction ints; - private final BiFunction longs; - private final BiFunction doubles; - private final BiFunction keywords; - private final BiFunction bools; + private final TriFunction ints; + private final TriFunction longs; + private final TriFunction doubles; + private final TriFunction keywords; + private final TriFunction bools; private ComparisonMapper( - BiFunction ints, - BiFunction longs, - BiFunction doubles, - BiFunction keywords, - BiFunction bools + TriFunction ints, + TriFunction longs, + TriFunction doubles, + TriFunction keywords, + TriFunction bools ) { this.ints = ints; this.longs = longs; @@ -95,16 +95,16 @@ private ComparisonMapper( } ComparisonMapper( - BiFunction ints, - BiFunction longs, - BiFunction doubles, - BiFunction keywords + TriFunction ints, + TriFunction longs, + TriFunction doubles, + TriFunction keywords ) { this.ints = ints; this.longs = longs; this.doubles = doubles; this.keywords = keywords; - this.bools = (lhs, rhs) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; + this.bools = (source, lhs, rhs) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; } @Override @@ -129,20 +129,20 @@ public final ExpressionEvaluator.Factory map(BinaryComparison bc, Layout layout) var leftEval = toEvaluator(bc.left(), layout); var rightEval = toEvaluator(bc.right(), layout); if (leftType == DataTypes.KEYWORD || 
leftType == DataTypes.TEXT || leftType == DataTypes.IP || leftType == DataTypes.VERSION) { - return keywords.apply(leftEval, rightEval); + return keywords.apply(bc.source(), leftEval, rightEval); } if (leftType == DataTypes.BOOLEAN) { - return bools.apply(leftEval, rightEval); + return bools.apply(bc.source(), leftEval, rightEval); } if (leftType == DataTypes.DATETIME) { - return longs.apply(leftEval, rightEval); + return longs.apply(bc.source(), leftEval, rightEval); } if (leftType == EsqlDataTypes.GEO_POINT) { - return longs.apply(leftEval, rightEval); + return longs.apply(bc.source(), leftEval, rightEval); } // TODO: Perhaps neithger geo_point, not cartesian_point should support comparisons? if (leftType == EsqlDataTypes.CARTESIAN_POINT) { - return longs.apply(leftEval, rightEval); + return longs.apply(bc.source(), leftEval, rightEval); } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); } @@ -151,10 +151,10 @@ public static ExpressionEvaluator.Factory castToEvaluator( BinaryOperator op, Layout layout, DataType required, - BiFunction factory + TriFunction factory ) { - var lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), layout)); - var rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); - return factory.apply(lhs, rhs); + var lhs = Cast.cast(op.source(), op.left().dataType(), required, toEvaluator(op.left(), layout)); + var rhs = Cast.cast(op.source(), op.right().dataType(), required, toEvaluator(op.right(), layout)); + return factory.apply(op.source(), lhs, rhs); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java index c0fa71a59e415..f37751e18858f 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java @@ -20,7 +20,8 @@ public abstract class RegexMapper extends ExpressionMapper> { public static final ExpressionMapper REGEX_MATCH = new RegexMapper() { @Override public ExpressionEvaluator.Factory map(RegexMatch expression, Layout layout) { - return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RegexMatchEvaluator( + return dvrCtx -> new RegexMatchEvaluator( + expression.source(), EvalMapper.toEvaluator(expression.field(), layout).get(dvrCtx), new CharacterRunAutomaton(((AbstractStringPattern) expression.pattern()).createAutomaton()), dvrCtx diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 948e44f946920..25477e501645d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -110,16 +110,16 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator.apply(new MvMax(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); if (dataType == DataTypes.BOOLEAN) { - return new GreatestBooleanEvaluator.Factory(factories); + return new GreatestBooleanEvaluator.Factory(source(), factories); } if (dataType == DataTypes.DOUBLE) { - return new GreatestDoubleEvaluator.Factory(factories); + return new GreatestDoubleEvaluator.Factory(source(), factories); } if (dataType == DataTypes.INTEGER) { - return new GreatestIntEvaluator.Factory(factories); + return new GreatestIntEvaluator.Factory(source(), factories); } if 
(dataType == DataTypes.LONG) { - return new GreatestLongEvaluator.Factory(factories); + return new GreatestLongEvaluator.Factory(source(), factories); } if (dataType == DataTypes.KEYWORD || dataType == DataTypes.TEXT @@ -127,7 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator.apply(new MvMin(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); if (dataType == DataTypes.BOOLEAN) { - return new LeastBooleanEvaluator.Factory(factories); + return new LeastBooleanEvaluator.Factory(source(), factories); } if (dataType == DataTypes.DOUBLE) { - return new LeastDoubleEvaluator.Factory(factories); + return new LeastDoubleEvaluator.Factory(source(), factories); } if (dataType == DataTypes.INTEGER) { - return new LeastIntEvaluator.Factory(factories); + return new LeastIntEvaluator.Factory(source(), factories); } if (dataType == DataTypes.LONG) { - return new LeastLongEvaluator.Factory(factories); + return new LeastLongEvaluator.Factory(source(), factories); } if (dataType == DataTypes.KEYWORD || dataType == DataTypes.TEXT @@ -127,7 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return dvrCtx -> new DateFormatConstantEvaluator(fieldEvaluator.get(dvrCtx), UTC_DATE_TIME_FORMATTER, dvrCtx); + return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), UTC_DATE_TIME_FORMATTER, dvrCtx); } if (format.dataType() != DataTypes.KEYWORD) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { DateFormatter formatter = toFormatter(format.fold(), ((EsqlConfiguration) configuration()).locale()); - return dvrCtx -> new DateFormatConstantEvaluator(fieldEvaluator.get(dvrCtx), formatter, dvrCtx); + return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); } var formatEvaluator = toEvaluator.apply(format); 
return dvrCtx -> new DateFormatEvaluator( + source(), fieldEvaluator.get(dvrCtx), formatEvaluator.get(dvrCtx), ((EsqlConfiguration) configuration()).locale(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 0c70c9065dfc4..e5063bc0cbab4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -149,10 +149,14 @@ public ExpressionEvaluator.Factory toEvaluator(Function info() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return dvrCtx -> new NowEvaluator(now, dvrCtx); + return dvrCtx -> new NowEvaluator(source(), now, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index 36d889ea1b19a..f47637ced2e90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -58,6 +58,7 @@ public CIDRMatch(Source source, Expression ipField, List matches) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); return dvrCtx -> new CIDRMatchEvaluator( + source(), ipEvaluatorSupplier.get(dvrCtx), matches.stream().map(x -> toEvaluator.apply(x).get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), dvrCtx diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 90766a95e9cc0..8bc3ba3b184e9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -52,16 +52,16 @@ static int process(int fieldVal) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); if (dataType() == DataTypes.DOUBLE) { - return new AbsDoubleEvaluator.Factory(field); + return new AbsDoubleEvaluator.Factory(source(), field); } if (dataType() == DataTypes.UNSIGNED_LONG) { return field; } if (dataType() == DataTypes.LONG) { - return new AbsLongEvaluator.Factory(field); + return new AbsLongEvaluator.Factory(source(), field); } if (dataType() == DataTypes.INTEGER) { - return new AbsIntEvaluator.Factory(field); + return new AbsIntEvaluator.Factory(source(), field); } throw EsqlIllegalArgumentException.illegalDataType(dataType()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java index 08a842e8b9fd7..305228362a9d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java @@ -33,7 +33,7 @@ abstract class AbstractTrigonometricFunction extends UnaryScalarFunction impleme @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return doubleEvaluator(Cast.cast(field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field()))); + return doubleEvaluator(Cast.cast(source(), 
field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field()))); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index f730b3358a7f1..88079e60fa66a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -28,7 +28,7 @@ public Atan(Source source, @Param(name = "n", type = { "integer", "long", "doubl @Override protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { - return new AtanEvaluator.Factory(field); + return new AtanEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index 31fdea6e0d00c..e754aff1853b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -85,9 +85,9 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var yEval = Cast.cast(y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); - var xEval = Cast.cast(x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); - return new Atan2Evaluator.Factory(yEval, xEval); + var yEval = Cast.cast(source(), y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); + var xEval = Cast.cast(source(), x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); + return new Atan2Evaluator.Factory(source(), yEval, xEval); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 33115352d9e54..27abeb44b2ff0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -115,7 +115,11 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); - return new IsFiniteEvaluator.Factory(field); + return new IsFiniteEvaluator.Factory(source(), field); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java index 70e8137d8871e..80068f3aaf8d4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java @@ -23,7 +23,7 @@ public IsInfinite(Source source, Expression field) { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new IsInfiniteEvaluator.Factory(toEvaluator.apply(field())); + return new IsInfiniteEvaluator.Factory(source(), toEvaluator.apply(field())); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java index 4db5534631fc9..07875987f74d7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java @@ -23,7 +23,7 @@ public IsNaN(Source source, Expression field) { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new IsNaNEvaluator.Factory(toEvaluator.apply(field())); + return new IsNaNEvaluator.Factory(source(), toEvaluator.apply(field())); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 9e160e7c2f15f..0658dcccbbb48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -112,8 +112,8 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var baseEval = Cast.cast(base.dataType(), DataTypes.DOUBLE, toEvaluator.apply(base)); - var expEval = Cast.cast(exponent.dataType(), DataTypes.DOUBLE, toEvaluator.apply(exponent)); + var baseEval = Cast.cast(source(), base.dataType(), DataTypes.DOUBLE, toEvaluator.apply(base)); + var expEval = Cast.cast(source(), exponent.dataType(), DataTypes.DOUBLE, toEvaluator.apply(exponent)); return new PowEvaluator.Factory(source(), baseEval, expEval); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 4e1d12606a34f..3cbc74b3b6c28 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -7,6 +7,7 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -38,6 +39,8 @@ public class Round extends ScalarFunction implements OptionalArgument, EvaluatorMapper { + private static final BiFunction EVALUATOR_IDENTITY = (s, e) -> e; + private final Expression field, decimals; public Round(Source source, Expression field, Expression decimals) { @@ -137,28 +140,28 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator, - Function noDecimals, - BiFunction withDecimals + BiFunction noDecimals, + TriFunction withDecimals ) { var fieldEvaluator = toEvaluator.apply(field()); if (decimals == null) { - return noDecimals.apply(fieldEvaluator); + return noDecimals.apply(source(), fieldEvaluator); } - var decimalsEvaluator = Cast.cast(decimals().dataType(), DataTypes.LONG, toEvaluator.apply(decimals())); - return withDecimals.apply(fieldEvaluator, decimalsEvaluator); + var decimalsEvaluator = Cast.cast(source(), decimals().dataType(), DataTypes.LONG, toEvaluator.apply(decimals())); + return withDecimals.apply(source(), fieldEvaluator, decimalsEvaluator); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index eaf632ee8c40e..7487d8df90395 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -32,7 +32,7 @@ public Sin( @Override protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { - return new 
SinEvaluator.Factory(field); + return new SinEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index dc9e3bc2b3fde..bdaf3a9498b09 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -46,7 +46,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var values = children().stream().map(toEvaluator).toArray(ExpressionEvaluator.Factory[]::new); - return new ConcatEvaluator.Factory(context -> new BreakingBytesRefBuilder(context.breaker(), "concat"), values); + return new ConcatEvaluator.Factory(source(), context -> new BreakingBytesRefBuilder(context.breaker(), "concat"), values); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 1140bfcf1f5d9..250cbfad69b39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -98,6 +98,6 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new EndsWithEvaluator.Factory(toEvaluator.apply(str), toEvaluator.apply(suffix)); + return new EndsWithEvaluator.Factory(source(), toEvaluator.apply(str), toEvaluator.apply(suffix)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index 382f64fcf831c..bcb4b81c452f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -50,7 +50,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new LTrimEvaluator.Factory(toEvaluator.apply(field())); + return new LTrimEvaluator.Factory(source(), toEvaluator.apply(field())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 14cb03943f520..65d3a6388f790 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -74,6 +74,7 @@ static BytesRef process( @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { return new LeftEvaluator.Factory( + source, context -> new BytesRef(), context -> new UnicodeUtil.UTF8CodePoint(), toEvaluator.apply(str), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index 47ee8f20e7f32..9f944c62af6a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -72,6 +72,6 @@ protected NodeInfo info() { @Override public ExpressionEvaluator.Factory 
toEvaluator(Function toEvaluator) { - return new LengthEvaluator.Factory(toEvaluator.apply(field())); + return new LengthEvaluator.Factory(source(), toEvaluator.apply(field())); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index 98fc93b4f6acc..f694999ec2767 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -50,7 +50,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new RTrimEvaluator.Factory(toEvaluator.apply(field())); + return new RTrimEvaluator.Factory(source(), toEvaluator.apply(field())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index f77c703e7cb0c..7e96f7a396472 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -78,6 +78,7 @@ static BytesRef process( @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { return new RightEvaluator.Factory( + source, context -> new BytesRef(), context -> new UnicodeUtil.UTF8CodePoint(), toEvaluator.apply(str), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 7f18be0e7b18e..950486b1b0eed 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -122,12 +122,12 @@ protected NodeInfo info() { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var str = toEvaluator.apply(left()); if (right().foldable() == false) { - return new SplitVariableEvaluator.Factory(str, toEvaluator.apply(right()), context -> new BytesRef()); + return new SplitVariableEvaluator.Factory(source(), str, toEvaluator.apply(right()), context -> new BytesRef()); } BytesRef delim = (BytesRef) right().fold(); if (delim.length != 1) { throw new QlIllegalArgumentException("for now delimiter must be a single byte"); } - return new SplitSingleByteEvaluator.Factory(str, delim.bytes[delim.offset], context -> new BytesRef()); + return new SplitSingleByteEvaluator.Factory(source(), str, delim.bytes[delim.offset], context -> new BytesRef()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index 3497d9360b187..d78ad3df64d1f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -91,6 +91,6 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new StartsWithEvaluator.Factory(toEvaluator.apply(str), toEvaluator.apply(prefix)); + return new StartsWithEvaluator.Factory(source(), toEvaluator.apply(str), toEvaluator.apply(prefix)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 261b7aeb19da2..c287aeafc8d80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -135,9 +135,9 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); - return new TrimEvaluator.Factory(field); + return new TrimEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java index 1e1da2634fadf..4439c4ebc754e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java @@ -35,7 +35,7 @@ public Add(Source source, Expression left, Expression right) { AddIntsEvaluator.Factory::new, AddLongsEvaluator.Factory::new, AddUnsignedLongsEvaluator.Factory::new, - (s, lhs, rhs) -> new AddDoublesEvaluator.Factory(lhs, rhs), + (s, lhs, rhs) -> new AddDoublesEvaluator.Factory(source, lhs, rhs), AddDatetimesEvaluator.Factory::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java index 0bcbe21c60a63..42fd526cb3b99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java @@ -34,7 +34,7 @@ public Div(Source source, Expression left, Expression right, DataType type) { DivIntsEvaluator.Factory::new, DivLongsEvaluator.Factory::new, DivUnsignedLongsEvaluator.Factory::new, - (s, lhs, rhs) -> new DivDoublesEvaluator.Factory(lhs, rhs) + (s, lhs, rhs) -> new DivDoublesEvaluator.Factory(source, lhs, rhs) ); this.type = type; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index dc5be3373198b..5f8006d10f6cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -115,8 +115,8 @@ public ExpressionEvaluator.Factory toEvaluator(Function new ModDoublesEvaluator.Factory(lhs, rhs) + (s, lhs, rhs) -> new ModDoublesEvaluator.Factory(source, lhs, rhs) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java index 963f09486a361..3eb78ef953f8f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java @@ -30,7 +30,7 @@ public Mul(Source source, Expression left, Expression right) { MulIntsEvaluator.Factory::new, MulLongsEvaluator.Factory::new, MulUnsignedLongsEvaluator.Factory::new, - (s, lhs, rhs) -> new 
MulDoublesEvaluator.Factory(lhs, rhs) + (s, lhs, rhs) -> new MulDoublesEvaluator.Factory(source, lhs, rhs) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java index 2ad5c5b9de5b5..37c8d6dd652e3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java @@ -55,7 +55,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new SubDoublesEvaluator.Factory(lhs, rhs), + (s, lhs, rhs) -> new SubDoublesEvaluator.Factory(source, lhs, rhs), SubDatetimesEvaluator.Factory::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 0197ecd4f5a24..bdd93d733a460 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -24,11 +23,9 @@ import org.elasticsearch.xpack.ql.expression.NameId; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; -import 
org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.EsField; -import org.elasticsearch.xpack.ql.util.StringUtils; import java.io.IOException; import java.util.Collection; @@ -38,6 +35,8 @@ import java.util.function.LongFunction; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ql.util.SourceUtils.readSourceWithText; + /** * A customized stream input used to deserialize ESQL physical plan fragments. Complements stream * input with methods that read plan nodes, Attributes, Expressions, etc. @@ -107,44 +106,7 @@ public PhysicalPlan readPhysicalPlanNode() throws IOException { public Source readSource() throws IOException { boolean hasSource = readBoolean(); - if (hasSource) { - int line = readInt(); - int column = readInt(); - int length = readInt(); - int charPositionInLine = column - 1; - return new Source(new Location(line, charPositionInLine), sourceText(configuration.query(), line, column, length)); - } - return Source.EMPTY; - } - - private static String sourceText(String query, int line, int column, int length) { - if (line <= 0 || column <= 0 || query.isEmpty()) { - return StringUtils.EMPTY; - } - int offset = textOffset(query, line, column); - if (offset + length > query.length()) { - throw new EsqlIllegalArgumentException( - "location [@" + line + ":" + column + "] and length [" + length + "] overrun query size [" + query.length() + "]" - ); - } - return query.substring(offset, offset + length); - } - - private static int textOffset(String query, int line, int column) { - int offset = 0; - if (line > 1) { - String[] lines = query.split("\n"); - if (line > lines.length) { - throw new EsqlIllegalArgumentException( - "line location [" + line + "] higher than max [" + lines.length + "] in query [" + query + "]" - ); - } - for (int i = 0; i < line - 1; i++) { - offset += lines[i].length() + 1; // +1 accounts for the 
removed \n - } - } - offset += column - 1; // -1 since column is 1-based indexed - return offset; + return hasSource ? readSourceWithText(this, configuration.query()) : Source.EMPTY; } public Expression readExpression() throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 846c28b1ce719..66bd4163013ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -20,6 +20,8 @@ import java.io.IOException; import java.util.function.Function; +import static org.elasticsearch.xpack.ql.util.SourceUtils.writeSourceNoText; + /** * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. 
@@ -52,9 +54,7 @@ public void writePhysicalPlanNode(PhysicalPlan physicalPlan) throws IOException public void writeSource(Source source) throws IOException { writeBoolean(true); - writeInt(source.source().getLineNumber()); - writeInt(source.source().getColumnNumber()); - writeInt(source.text().length()); + writeSourceNoText(this, source); } public void writeNoSource() throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java index e419be2b7e1fc..0e481c3dd762b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java @@ -41,12 +41,16 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.querydsl.query.Query; import org.elasticsearch.xpack.ql.tree.Source; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.ql.util.SourceUtils.readSource; +import static org.elasticsearch.xpack.ql.util.SourceUtils.writeSource; + /** * Lucene query that wraps another query and only selects documents that match * the wrapped query and have a single field value. 
@@ -68,6 +72,8 @@ public class SingleValueQuery extends Query { Builder::new ); + public static final String MULTI_VALUE_WARNING = "single-value function encountered multi-value"; + private final Query next; private final String field; @@ -94,7 +100,7 @@ public void enrichNestedSort(NestedSortBuilder sort) { @Override public Builder asBuilder() { - return new Builder(next.asBuilder(), field, new Stats()); + return new Builder(next.asBuilder(), field, new Stats(), next.source()); } @Override @@ -125,11 +131,13 @@ public static class Builder extends AbstractQueryBuilder { private final QueryBuilder next; private final String field; private final Stats stats; + private final Source source; - Builder(QueryBuilder next, String field, Stats stats) { + Builder(QueryBuilder next, String field, Stats stats, Source source) { this.next = next; this.field = field; this.stats = stats; + this.source = source; } Builder(StreamInput in) throws IOException { @@ -137,12 +145,21 @@ public static class Builder extends AbstractQueryBuilder { this.next = in.readNamedWriteable(QueryBuilder.class); this.field = in.readString(); this.stats = new Stats(); + if (in.getTransportVersion().onOrAfter(TransportVersions.SOURCE_IN_SINGLE_VALUE_QUERY_ADDED)) { + this.source = readSource(in); + } else { + this.source = Source.EMPTY; + + } } @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeNamedWriteable(next); out.writeString(field); + if (out.getTransportVersion().onOrAfter(TransportVersions.SOURCE_IN_SINGLE_VALUE_QUERY_ADDED)) { + writeSource(out, source); + } } public QueryBuilder next() { @@ -153,6 +170,10 @@ public String field() { return field; } + public Source source() { + return source; + } + @Override public String getWriteableName() { return ENTRY.name; @@ -163,6 +184,7 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.startObject(ENTRY.name); builder.field("field", field); builder.field("next", next, params); + 
builder.field("source", source.toString()); builder.endObject(); } @@ -178,7 +200,12 @@ protected org.apache.lucene.search.Query doToQuery(SearchExecutionContext contex stats.missingField++; return new MatchNoDocsQuery("missing field [" + field + "]"); } - return new LuceneQuery(next.toQuery(context), context.getForField(ft, MappedFieldType.FielddataOperation.SEARCH), stats); + return new LuceneQuery( + next.toQuery(context), + context.getForField(ft, MappedFieldType.FielddataOperation.SEARCH), + stats, + new Warnings(source) + ); } @Override @@ -191,7 +218,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws if (rewritten == next) { return this; } - return new Builder(rewritten, field, stats); + return new Builder(rewritten, field, stats, source); } @Override @@ -210,14 +237,16 @@ Stats stats() { } private static class LuceneQuery extends org.apache.lucene.search.Query { - private final org.apache.lucene.search.Query next; + final org.apache.lucene.search.Query next; private final IndexFieldData fieldData; private final Stats stats; + private final Warnings warnings; - LuceneQuery(org.apache.lucene.search.Query next, IndexFieldData fieldData, Stats stats) { + LuceneQuery(org.apache.lucene.search.Query next, IndexFieldData fieldData, Stats stats, Warnings warnings) { this.next = next; this.fieldData = fieldData; this.stats = stats; + this.warnings = warnings; } @Override @@ -237,12 +266,12 @@ public org.apache.lucene.search.Query rewrite(IndexReader reader) throws IOExcep if (rewritten == next) { return this; } - return new LuceneQuery(rewritten, fieldData, stats); + return new LuceneQuery(rewritten, fieldData, stats, warnings); } @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { - return new SingleValueWeight(this, next.createWeight(searcher, scoreMode, boost), fieldData); + return new SingleValueWeight(this, next.createWeight(searcher, scoreMode, boost), fieldData, 
warnings); } @Override @@ -254,12 +283,14 @@ public boolean equals(Object obj) { return false; } SingleValueQuery.LuceneQuery other = (SingleValueQuery.LuceneQuery) obj; - return next.equals(other.next) && fieldData.getFieldName().equals(other.fieldData.getFieldName()); + return next.equals(other.next) + && fieldData.getFieldName().equals(other.fieldData.getFieldName()) + && warnings.equals(other.warnings); } @Override public int hashCode() { - return Objects.hash(classHash(), next, fieldData); + return Objects.hash(classHash(), next, fieldData, warnings); } @Override @@ -278,12 +309,14 @@ private static class SingleValueWeight extends Weight { private final Stats stats; private final Weight next; private final IndexFieldData fieldData; + private final Warnings warnings; - private SingleValueWeight(SingleValueQuery.LuceneQuery query, Weight next, IndexFieldData fieldData) { + private SingleValueWeight(SingleValueQuery.LuceneQuery query, Weight next, IndexFieldData fieldData, Warnings warnings) { super(query); this.stats = query.stats; this.next = next; this.fieldData = fieldData; + this.warnings = warnings; } @Override @@ -354,14 +387,14 @@ private Scorer scorer(LeafReaderContext context, Scorer nextScorer, LeafNumericF return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries(nextScorer.iterator(), sortedNumerics) + new TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries(nextScorer.iterator(), sortedNumerics, warnings) ); } stats.numericMultiApprox++; return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries(nextIterator, sortedNumerics) + new TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries(nextIterator, sortedNumerics, warnings) ); } @@ -388,14 +421,14 @@ private Scorer scorer(LeafReaderContext context, Scorer nextScorer, LeafOrdinals return new SingleValueQueryScorer( this, nextScorer, - new 
TwoPhaseIteratorForSortedSetAndSinglePhaseQueries(nextScorer.iterator(), sortedSet) + new TwoPhaseIteratorForSortedSetAndSinglePhaseQueries(nextScorer.iterator(), sortedSet, warnings) ); } stats.ordinalsMultiApprox++; return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(nextIterator, sortedSet) + new TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(nextIterator, sortedSet, warnings) ); } @@ -407,14 +440,14 @@ private Scorer scorer(Scorer nextScorer, LeafFieldData lfd) { return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries(nextScorer.iterator(), sortedBinary) + new TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries(nextScorer.iterator(), sortedBinary, warnings) ); } stats.bytesApprox++; return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries(nextIterator, sortedBinary) + new TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries(nextIterator, sortedBinary, warnings) ); } @@ -469,13 +502,16 @@ public int docID() { private static class TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries extends TwoPhaseIterator { private final SortedNumericDocValues sortedNumerics; + private final Warnings warnings; private TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries( DocIdSetIterator approximation, - SortedNumericDocValues sortedNumerics + SortedNumericDocValues sortedNumerics, + Warnings warning ) { super(approximation); this.sortedNumerics = sortedNumerics; + this.warnings = warning; } @Override @@ -483,7 +519,11 @@ public boolean matches() throws IOException { if (false == sortedNumerics.advanceExact(approximation.docID())) { return false; } - return sortedNumerics.docValueCount() == 1; + if (sortedNumerics.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); + return false; + } + return true; } @Override @@ -495,11 +535,17 @@ public float matchCost() { 
private static class TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries extends TwoPhaseIterator { private final SortedNumericDocValues sortedNumerics; private final TwoPhaseIterator next; + private final Warnings warnings; - private TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries(TwoPhaseIterator next, SortedNumericDocValues sortedNumerics) { + private TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries( + TwoPhaseIterator next, + SortedNumericDocValues sortedNumerics, + Warnings warnings + ) { super(next.approximation()); this.sortedNumerics = sortedNumerics; this.next = next; + this.warnings = warnings; } @Override @@ -508,6 +554,7 @@ public boolean matches() throws IOException { return false; } if (sortedNumerics.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); return false; } return next.matches(); @@ -521,10 +568,16 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries extends TwoPhaseIterator { private final SortedBinaryDocValues sortedBinary; + private final Warnings warnings; - private TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries(DocIdSetIterator approximation, SortedBinaryDocValues sortedBinary) { + private TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries( + DocIdSetIterator approximation, + SortedBinaryDocValues sortedBinary, + Warnings warnings + ) { super(approximation); this.sortedBinary = sortedBinary; + this.warnings = warnings; } @Override @@ -532,7 +585,11 @@ public boolean matches() throws IOException { if (false == sortedBinary.advanceExact(approximation.docID())) { return false; } - return sortedBinary.docValueCount() == 1; + if (sortedBinary.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); + return false; + } + return true; } @Override @@ -544,11 +601,13 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedSetAndTwoPhaseQueries extends 
TwoPhaseIterator { private final SortedSetDocValues sortedSet; private final TwoPhaseIterator next; + private final Warnings warnings; - private TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(TwoPhaseIterator next, SortedSetDocValues sortedSet) { + private TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(TwoPhaseIterator next, SortedSetDocValues sortedSet, Warnings warnings) { super(next.approximation()); this.sortedSet = sortedSet; this.next = next; + this.warnings = warnings; } @Override @@ -557,6 +616,7 @@ public boolean matches() throws IOException { return false; } if (sortedSet.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); return false; } return next.matches(); @@ -570,10 +630,16 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedSetAndSinglePhaseQueries extends TwoPhaseIterator { private final SortedSetDocValues sortedSet; + private final Warnings warnings; - private TwoPhaseIteratorForSortedSetAndSinglePhaseQueries(DocIdSetIterator approximation, SortedSetDocValues sortedSet) { + private TwoPhaseIteratorForSortedSetAndSinglePhaseQueries( + DocIdSetIterator approximation, + SortedSetDocValues sortedSet, + Warnings warnings + ) { super(approximation); this.sortedSet = sortedSet; + this.warnings = warnings; } @Override @@ -581,7 +647,11 @@ public boolean matches() throws IOException { if (false == sortedSet.advanceExact(approximation.docID())) { return false; } - return sortedSet.docValueCount() == 1; + if (sortedSet.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); + return false; + } + return true; } @Override @@ -593,11 +663,17 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries extends TwoPhaseIterator { private final SortedBinaryDocValues sortedBinary; private final TwoPhaseIterator next; + private final Warnings warnings; - private 
TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries(TwoPhaseIterator next, SortedBinaryDocValues sortedBinary) { + private TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries( + TwoPhaseIterator next, + SortedBinaryDocValues sortedBinary, + Warnings warnings + ) { super(next.approximation()); this.sortedBinary = sortedBinary; this.next = next; + this.warnings = warnings; } @Override @@ -606,6 +682,7 @@ public boolean matches() throws IOException { return false; } if (sortedBinary.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); return false; } return next.matches(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 99b21225e1985..dbb7c1f130a1b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -441,6 +441,6 @@ private void assertWarnings(List warnings) { normalized.add(normW); } } - assertMap(normalized, matchesList(testCase.expectedWarnings)); + assertMap(normalized, matchesList(testCase.expectedWarnings(true))); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index f003170a7551d..3bac4f1c4b5c0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -436,6 +436,15 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert assertSimpleWithNulls(data, block, i); } } + + // Note: the null-in-fast-null-out handling prevents any exception from being thrown, so the warnings 
provided in some test + // cases won't actually be registered. This isn't an issue for unary functions, but could be an issue for n-ary ones, if + // function processing of the first parameter(s) could raise an exception/warning. (But hasn't been the case so far.) + // For n-ary functions, dealing with one multivalue (before hitting the null parameter injected above) will now trigger + // a warning ("SV-function encountered a MV") that thus needs to be checked. + if (simpleData.stream().anyMatch(List.class::isInstance) && testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 5ef485e8ba441..6bf816ce4c734 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -70,7 +70,8 @@ public static Iterable parameters() { "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", DataTypes.DOUBLE, is(nullValue()) - ); + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 0189d29e5e717..cc52a965e70b6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -179,7 +179,8 @@ public static Iterable parameters() { "AddIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataTypes.INTEGER, is(nullValue()) - ); + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index e45a86375fd4e..db924d0d68c53 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -155,7 +155,8 @@ public static Iterable parameters() { "SubIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataTypes.INTEGER, is(nullValue()) - ); + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 8b185e013a8a5..12b8185cbec5d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.esql.planner.Mapper; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -47,6 +48,7 @@ import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; +import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; import org.junit.Before; @@ -257,7 +259,8 @@ public void testCountOneFieldWithFilter() { assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen")); var stat = as(esStatsQuery.stats().get(0), Stat.class); assertThat(stat.query(), is(QueryBuilders.existsQuery("salary"))); - var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("salary").gt(1000), "salary"); + var source = ((SingleValueQuery.Builder) esStatsQuery.query()).source(); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("salary").gt(1000), "salary", source); 
assertThat(expected.toString(), is(esStatsQuery.query().toString())); } @@ -297,7 +300,8 @@ public void testAnotherCountAllWithFilter() { var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class); assertThat(esStatsQuery.limit(), is(nullValue())); assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen")); - var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no"); + var source = ((SingleValueQuery.Builder) esStatsQuery.query()).source(); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", source); assertThat(expected.toString(), is(esStatsQuery.query().toString())); } @@ -308,8 +312,8 @@ public void testAnotherCountAllWithFilter() { * \_AggregateExec[[],[COUNT([2a][KEYWORD]) AS c, COUNT(1[INTEGER]) AS c_literal],FINAL,null] * \_ExchangeExec[[count{r}#18, seen{r}#19, count{r}#20, seen{r}#21],true] * \_EsStatsQueryExec[test], stats[Stat[name=*, type=COUNT, query=null], Stat[name=*, type=COUNT, query=null]]], - * query[{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}}}] - * [count{r}#23, seen{r}#24, count{r}#25, seen{r}#26], limit[], + * query[{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}, + * "source":"emp_no > 10010@2:9"}}][count{r}#23, seen{r}#24, count{r}#25, seen{r}#26], limit[], */ public void testMultiCountAllWithFilter() { var plan = plan(""" @@ -331,7 +335,8 @@ public void testMultiCountAllWithFilter() { var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class); assertThat(esStatsQuery.limit(), is(nullValue())); assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen", "count", "seen")); - var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no"); + var source = ((SingleValueQuery.Builder) esStatsQuery.query()).source(); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", 
source); assertThat(expected.toString(), is(esStatsQuery.query().toString())); } @@ -376,8 +381,8 @@ public boolean exists(String field) { assertThat(Expressions.names(localSource.output()), contains("count", "seen")); } - private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName) { - return FilterTests.singleValueQuery(inner, fieldName); + private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName, Source source) { + return FilterTests.singleValueQuery(inner, fieldName, source); } private Stat queryStatsFor(PhysicalPlan plan) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java index 774ac24d3cd02..926f9dd27f84f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; @@ -31,6 +32,7 @@ import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; +import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.util.Queries; import org.junit.BeforeClass; @@ -49,12 +51,13 @@ import static org.elasticsearch.xpack.ql.util.Queries.Clause.FILTER; import static org.elasticsearch.xpack.ql.util.Queries.Clause.MUST; import static org.elasticsearch.xpack.ql.util.Queries.Clause.SHOULD; +import static 
org.elasticsearch.xpack.ql.util.SourceUtils.writeSource; import static org.hamcrest.Matchers.nullValue; public class FilterTests extends ESTestCase { // use a field that already exists in the mapping - private static final String AT_TIMESTAMP = "emp_no"; + private static final String EMP_NO = "emp_no"; private static final String OTHER_FIELD = "salary"; private static EsqlParser parser; @@ -82,7 +85,7 @@ public static void init() { } public void testTimestampRequestFilterNoQueryFilter() { - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test @@ -99,24 +102,29 @@ public void testTimestampNoRequestFilterQueryFilter() { var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} - """, AT_TIMESTAMP, value), null); + """, EMP_NO, value), null); var filter = filterQueryForTransportNodes(plan); - var expected = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(value), AT_TIMESTAMP); + var expected = singleValueQuery(rangeQuery(EMP_NO).gt(value), EMP_NO, ((SingleValueQuery.Builder) filter).source()); assertEquals(expected.toString(), filter.toString()); } public void testTimestampRequestFilterQueryFilter() { var value = 10; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > 10 - """, AT_TIMESTAMP, value), restFilter); + """, EMP_NO, value), restFilter); var filter = filterQueryForTransportNodes(plan); - var queryFilter = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(value).includeUpper(false), AT_TIMESTAMP); + var builder = ((BoolQueryBuilder) filter).filter().get(1); + var queryFilter = singleValueQuery( + rangeQuery(EMP_NO).gt(value).includeUpper(false), + EMP_NO, + ((SingleValueQuery.Builder) builder).source() + ); var expected = Queries.combine(FILTER, asList(restFilter, queryFilter)); assertEquals(expected.toString(), filter.toString()); 
} @@ -124,16 +132,17 @@ public void testTimestampRequestFilterQueryFilter() { public void testTimestampRequestFilterQueryFilterWithConjunction() { var lowValue = 10; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} AND {} < {} - """, AT_TIMESTAMP, lowValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var left = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); - var right = singleValueQuery(rangeQuery(AT_TIMESTAMP).lt(highValue), AT_TIMESTAMP); + var musts = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).must(); + var left = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(0)).source()); + var right = singleValueQuery(rangeQuery(EMP_NO).lt(highValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(1)).source()); var must = Queries.combine(MUST, asList(left, right)); var expected = Queries.combine(FILTER, asList(restFilter, must)); assertEquals(expected.toString(), filter.toString()); @@ -142,12 +151,12 @@ public void testTimestampRequestFilterQueryFilterWithConjunction() { public void testTimestampRequestFilterQueryFilterWithDisjunctionOnDifferentFields() { var lowValue = 10; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} OR {} < {} - """, OTHER_FIELD, lowValue, AT_TIMESTAMP, highValue), restFilter); + """, OTHER_FIELD, lowValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); var expected = restFilter; @@ -157,16 +166,17 @@ public void testTimestampRequestFilterQueryFilterWithDisjunctionOnDifferentField public void 
testTimestampRequestFilterQueryFilterWithDisjunctionOnSameField() { var lowValue = 10; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} OR {} < {} - """, AT_TIMESTAMP, lowValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var left = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); - var right = singleValueQuery(rangeQuery(AT_TIMESTAMP).lt(highValue), AT_TIMESTAMP); + var shoulds = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).should(); + var left = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) shoulds.get(0)).source()); + var right = singleValueQuery(rangeQuery(EMP_NO).lt(highValue), EMP_NO, ((SingleValueQuery.Builder) shoulds.get(1)).source()); var should = Queries.combine(SHOULD, asList(left, right)); var expected = Queries.combine(FILTER, asList(restFilter, should)); assertEquals(expected.toString(), filter.toString()); @@ -176,16 +186,17 @@ public void testTimestampRequestFilterQueryFilterWithMultiConjunction() { var lowValue = 10; var highValue = 100; var eqValue = 1234; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} AND {} == {} AND {} < {} - """, AT_TIMESTAMP, lowValue, OTHER_FIELD, eqValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, OTHER_FIELD, eqValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var left = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); - var right = singleValueQuery(rangeQuery(AT_TIMESTAMP).lt(highValue), AT_TIMESTAMP); + var musts = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).must(); + 
var left = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(0)).source()); + var right = singleValueQuery(rangeQuery(EMP_NO).lt(highValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(1)).source()); var must = Queries.combine(MUST, asList(left, right)); var expected = Queries.combine(FILTER, asList(restFilter, must)); assertEquals(expected.toString(), filter.toString()); @@ -196,17 +207,18 @@ public void testTimestampRequestFilterQueryMultipleFilters() { var eqValue = 1234; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} |EVAL {} = {} |WHERE {} > {} - """, AT_TIMESTAMP, lowValue, AT_TIMESTAMP, eqValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, EMP_NO, eqValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var queryFilter = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); + var builder = ((BoolQueryBuilder) filter).filter().get(1); + var queryFilter = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) builder).source()); var expected = Queries.combine(FILTER, asList(restFilter, queryFilter)); assertEquals(expected.toString(), filter.toString()); } @@ -218,7 +230,7 @@ public void testTimestampOverriddenFilterFilter() { FROM test |EVAL {} = {} |WHERE {} > {} - """, AT_TIMESTAMP, OTHER_FIELD, AT_TIMESTAMP, eqValue), null); + """, EMP_NO, OTHER_FIELD, EMP_NO, eqValue), null); var filter = filterQueryForTransportNodes(plan); assertThat(filter, nullValue()); @@ -230,7 +242,7 @@ public void testTimestampAsFunctionArgument() { var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE to_int(to_string({})) == {} - """, AT_TIMESTAMP, eqValue), null); + """, EMP_NO, eqValue), null); var filter = filterQueryForTransportNodes(plan); assertThat(filter, 
nullValue()); @@ -242,7 +254,7 @@ public void testTimestampAsFunctionArgumentInsideExpression() { var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE to_int(to_string({})) + 987 == {} - """, AT_TIMESTAMP, eqValue), null); + """, EMP_NO, eqValue), null); var filter = filterQueryForTransportNodes(plan); assertThat(filter, nullValue()); @@ -252,13 +264,14 @@ public void testTimestampAsFunctionArgumentInsideExpression() { * Ugly hack to create a QueryBuilder for SingleValueQuery. * For some reason however the queryName is set to null on range queries when deserializing. */ - public static QueryBuilder singleValueQuery(QueryBuilder inner, String field) { + public static QueryBuilder singleValueQuery(QueryBuilder inner, String field, Source source) { try (BytesStreamOutput out = new BytesStreamOutput()) { // emulate SingleValueQuery writeTo out.writeFloat(AbstractQueryBuilder.DEFAULT_BOOST); out.writeOptionalString(null); out.writeNamedWriteable(inner); out.writeString(field); + writeSource(out, source); StreamInput in = new NamedWriteableAwareStreamInput( ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), @@ -292,7 +305,7 @@ private QueryBuilder restFilterQuery(String field) { } private QueryBuilder filterQueryForTransportNodes(PhysicalPlan plan) { - return PlannerUtils.detectFilter(plan, AT_TIMESTAMP); + return PlannerUtils.detectFilter(plan, EMP_NO); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java index 06c9febec324a..63b674aad7a90 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java @@ -12,13 +12,14 @@ import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; public class SingleValueQuerySerializationTests extends AbstractWireSerializingTestCase { @Override protected SingleValueQuery.Builder createTestInstance() { - return new SingleValueQuery.Builder(randomQuery(), randomFieldName(), new SingleValueQuery.Stats()); + return new SingleValueQuery.Builder(randomQuery(), randomFieldName(), new SingleValueQuery.Stats(), Source.EMPTY); } private QueryBuilder randomQuery() { @@ -35,12 +36,14 @@ protected SingleValueQuery.Builder mutateInstance(SingleValueQuery.Builder insta case 0 -> new SingleValueQuery.Builder( randomValueOtherThan(instance.next(), this::randomQuery), instance.field(), - new SingleValueQuery.Stats() + new SingleValueQuery.Stats(), + Source.EMPTY ); case 1 -> new SingleValueQuery.Builder( instance.next(), randomValueOtherThan(instance.field(), this::randomFieldName), - new SingleValueQuery.Stats() + new SingleValueQuery.Stats(), + Source.EMPTY ); default -> throw new IllegalArgumentException(); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index a6eacae2857e7..4322e5fbac2ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -74,44 +74,36 @@ public SingleValueQueryTests(Setup setup) { } public void testMatchAll() throws IOException { - testCase( - new SingleValueQuery(new MatchAll(Source.EMPTY), "foo").asBuilder(), - false, - false, - (fieldValues, count) -> assertThat(count, equalTo((int) fieldValues.stream().filter(l -> 
l.size() == 1).count())) - ); + testCase(new SingleValueQuery(new MatchAll(Source.EMPTY), "foo").asBuilder(), false, false, this::runCase); } public void testMatchSome() throws IOException { int max = between(1, 100); testCase( - new SingleValueQuery.Builder(new RangeQueryBuilder("i").lt(max), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder(new RangeQueryBuilder("i").lt(max), "foo", new SingleValueQuery.Stats(), Source.EMPTY), false, false, - (fieldValues, count) -> { - int expected = 0; - for (int i = 0; i < max; i++) { - if (fieldValues.get(i).size() == 1) { - expected++; - } - } - assertThat(count, equalTo(expected)); - } + (fieldValues, count) -> runCase(fieldValues, count, null, max) ); } public void testSubPhrase() throws IOException { testCase( - new SingleValueQuery.Builder(new MatchPhraseQueryBuilder("str", "fox jumped"), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder( + new MatchPhraseQueryBuilder("str", "fox jumped"), + "foo", + new SingleValueQuery.Stats(), + Source.EMPTY + ), false, true, - (fieldValues, count) -> assertThat(count, equalTo((int) fieldValues.stream().filter(l -> l.size() == 1).count())) + this::runCase ); } public void testMatchNone() throws IOException { testCase( - new SingleValueQuery.Builder(new MatchNoneQueryBuilder(), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder(new MatchNoneQueryBuilder(), "foo", new SingleValueQuery.Stats(), Source.EMPTY), true, false, (fieldValues, count) -> assertThat(count, equalTo(0)) @@ -120,7 +112,7 @@ public void testMatchNone() throws IOException { public void testRewritesToMatchNone() throws IOException { testCase( - new SingleValueQuery.Builder(new TermQueryBuilder("missing", 0), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder(new TermQueryBuilder("missing", 0), "foo", new SingleValueQuery.Stats(), Source.EMPTY), true, false, (fieldValues, count) -> assertThat(count, equalTo(0)) @@ -141,7 +133,7 @@ public void 
testNotMatchNone() throws IOException { new SingleValueQuery(new MatchAll(Source.EMPTY).negate(Source.EMPTY), "foo").negate(Source.EMPTY).asBuilder(), false, false, - (fieldValues, count) -> assertThat(count, equalTo((int) fieldValues.stream().filter(l -> l.size() == 1).count())) + this::runCase ); } @@ -151,15 +143,7 @@ public void testNotMatchSome() throws IOException { new SingleValueQuery(new RangeQuery(Source.EMPTY, "i", null, false, max, false, null), "foo").negate(Source.EMPTY).asBuilder(), false, true, - (fieldValues, count) -> { - int expected = 0; - for (int i = max; i < 100; i++) { - if (fieldValues.get(i).size() == 1) { - expected++; - } - } - assertThat(count, equalTo(expected)); - } + (fieldValues, count) -> runCase(fieldValues, count, max, 100) ); } @@ -168,6 +152,30 @@ interface TestCase { void run(List> fieldValues, int count) throws IOException; } + private void runCase(List> fieldValues, int count, Integer docsStart, Integer docsStop) { + int expected = 0; + int min = docsStart != null ? docsStart : 0; + int max = docsStop != null ? docsStop : fieldValues.size(); + for (int i = min; i < max; i++) { + if (fieldValues.get(i).size() == 1) { + expected++; + } + } + assertThat(count, equalTo(expected)); + + // query's count runs against the full set, not just min-to-max + if (fieldValues.stream().anyMatch(x -> x.size() > 1)) { + assertWarnings( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value" + ); + } + } + + private void runCase(List> fieldValues, int count) { + runCase(fieldValues, count, null, null); + } + private void testCase(SingleValueQuery.Builder builder, boolean rewritesToMatchNone, boolean subHasTwoPhase, TestCase testCase) throws IOException { MapperService mapper = createMapperService(mapping(setup::mapping)); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java index ff4fbb7a9d9b0..cf71549925eda 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java @@ -25,6 +25,7 @@ public Source(Location location, String text) { this.text = text; } + // TODO: rename to location() public Location source() { return location; } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java new file mode 100644 index 0000000000000..afba73373df92 --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ql.util; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.tree.Location; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.io.IOException; + +public final class SourceUtils { + + private SourceUtils() {} + + public static void writeSource(StreamOutput out, Source source) throws IOException { + writeSource(out, source, true); + } + + public static void writeSourceNoText(StreamOutput out, Source source) throws IOException { + writeSource(out, source, false); + } + + public static Source readSource(StreamInput in) throws IOException { + return readSource(in, null); + } + + public static Source readSourceWithText(StreamInput in, String queryText) throws IOException { + return readSource(in, queryText); + } + + private static void writeSource(StreamOutput out, Source source, boolean writeText) throws IOException { + out.writeInt(source.source().getLineNumber()); + out.writeInt(source.source().getColumnNumber()); + if (writeText) { + out.writeString(source.text()); + } else { + out.writeInt(source.text().length()); + } + } + + private static Source readSource(StreamInput in, @Nullable String queryText) throws IOException { + int line = in.readInt(); + int column = in.readInt(); + int charPositionInLine = column - 1; + + String text; + if (queryText == null) { + text = in.readString(); + } else { + int length = in.readInt(); + text = sourceText(queryText, line, column, length); + } + return new Source(new Location(line, charPositionInLine), text); + } + + private static String sourceText(String query, int line, int column, int length) { + if (line <= 0 || column <= 0 || query.isEmpty()) { + return StringUtils.EMPTY; + } + int offset = textOffset(query, line, column); + if (offset + length > query.length()) { + throw 
new QlIllegalArgumentException( + "location [@" + line + ":" + column + "] and length [" + length + "] overrun query size [" + query.length() + "]" + ); + } + return query.substring(offset, offset + length); + } + + private static int textOffset(String query, int line, int column) { + int offset = 0; + if (line > 1) { + String[] lines = query.split("\n"); + if (line > lines.length) { + throw new QlIllegalArgumentException( + "line location [" + line + "] higher than max [" + lines.length + "] in query [" + query + "]" + ); + } + for (int i = 0; i < line - 1; i++) { + offset += lines[i].length() + 1; // +1 accounts for the removed \n + } + } + offset += column - 1; // -1 since column is 1-based indexed + return offset; + } +} diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 240df6ecb0227..7ddd660645a7c 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -13,6 +13,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; public final class CsvSpecReader { @@ -84,8 +85,40 @@ public static class CsvTestCase { public String query; public String earlySchema; public String expectedResults; - public List expectedWarnings = new ArrayList<>(); + private final List expectedWarnings = new ArrayList<>(); public boolean ignoreOrder; + + // The emulated-specific warnings must always trail the non-emulated ones, if these are present. Otherwise, the closing bracket + // would need to be changed to a less common sequence (like `]#` maybe). + private static final String EMULATED_PREFIX = "#[emulated:"; + + /** + * Returns the warning headers expected to be added by the test. 
To declare such a header, use the `warning:definition` format + * in the CSV test declaration. The `definition` can use the `EMULATED_PREFIX` string to specify the format of the warning run on + * emulated physical operators, if this differs from the format returned by SingleValueQuery. + * @param forEmulated if true, the tests are run on emulated physical operators; if false, the test case is for queries executed + * on a "full stack" ESQL, having data loaded from Lucene. + * @return the list of headers that are expected to be returned part of the response. + */ + public List expectedWarnings(boolean forEmulated) { + List warnings = new ArrayList<>(expectedWarnings.size()); + for (String warning : expectedWarnings) { + int idx = warning.toLowerCase(Locale.ROOT).indexOf(EMULATED_PREFIX); + if (idx >= 0) { + assertTrue("Invalid warning spec: closing delimiter (]) missing: `" + warning + "`", warning.endsWith("]")); + if (forEmulated) { + if (idx + EMULATED_PREFIX.length() < warning.length() - 1) { + warnings.add(warning.substring(idx + EMULATED_PREFIX.length(), warning.length() - 1)); + } + } else if (idx > 0) { + warnings.add(warning.substring(0, idx)); + } // else: no warnings expected for non-emulated + } else { + warnings.add(warning); + } + } + return warnings; + } } } From 1a8d80321954cb325ef9a33ac58cb7eb919c2deb Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 5 Dec 2023 12:10:35 +0100 Subject: [PATCH 154/181] [Connector API] Implement update name/description action (#102825) --- .../api/connector.update_name.json | 39 ++++ .../entsearch/336_connector_update_name.yml | 81 +++++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 30 +-- .../connector/ConnectorIndexService.java | 71 ++++-- .../action/RestUpdateConnectorNameAction.java | 45 ++++ .../TransportUpdateConnectorNameAction.java | 55 +++++ .../action/UpdateConnectorNameAction.java | 209 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 
46 ++++ ...rNameActionRequestBWCSerializingTests.java | 50 +++++ ...NameActionResponseBWCSerializingTests.java | 42 ++++ .../xpack/security/operator/Constants.java | 1 + 12 files changed, 639 insertions(+), 35 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json new file mode 100644 index 0000000000000..e42d9b5766b0a --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json @@ -0,0 +1,39 @@ +{ + "connector.update_name": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the name and/or description fields in the connector document." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_name", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + }, + "body": { + "description": "An object containing the connector's name and/or description.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml new file mode 100644 index 0000000000000..6fe025b4ae002 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml @@ -0,0 +1,81 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Name": + - do: + connector.update_name: + connector_id: test-connector + body: + name: test-name + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { name: test-name } + +--- +"Update Connector Name and Description": + - do: + connector.update_name: + connector_id: test-connector + body: + name: test-name + description: test-description + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { name: test-name } + - match: { description: test-description } + +--- +"Update Connector Scheduling - 404 when connector doesn't exist": + - do: + 
catch: "missing" + connector.update_name: + connector_id: test-non-existent-connector + body: + name: test-name + description: test-description + +--- +"Update Connector Scheduling - 400 status code when connector_id is empty": + - do: + catch: "bad_request" + connector.update_name: + connector_id: "" + body: + name: test-name + description: test-description + +--- +"Update Connector Scheduling - 400 status code when payload is not string": + - do: + catch: "bad_request" + connector.update_name: + connector_id: test-connector + body: + name: + field_1: test + field_2: something + description: test-description diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index ce9bbfa4d6a4b..12bd2f4a25bdd 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -55,6 +55,7 @@ import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; @@ -66,6 +67,7 @@ import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import 
org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; @@ -73,6 +75,7 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; @@ -221,6 +224,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), + new ActionHandler<>(UpdateConnectorNameAction.INSTANCE, TransportUpdateConnectorNameAction.class), new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), @@ -295,6 +299,7 @@ public List getRestHandlers( new RestUpdateConnectorFilteringAction(), new 
RestUpdateConnectorLastSeenAction(), new RestUpdateConnectorLastSyncStatsAction(), + new RestUpdateConnectorNameAction(), new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 73d066f64d197..8c0c150ea88af 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -199,7 +199,7 @@ public Connector(StreamInput in) throws IOException { static final ParseField API_KEY_ID_FIELD = new ParseField("api_key_id"); public static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); - static final ParseField DESCRIPTION_FIELD = new ParseField("description"); + public static final ParseField DESCRIPTION_FIELD = new ParseField("description"); public static final ParseField ERROR_FIELD = new ParseField("error"); static final ParseField FEATURES_FIELD = new ParseField("features"); public static final ParseField FILTERING_FIELD = new ParseField("filtering"); @@ -461,6 +461,10 @@ public String getApiKeyId() { return apiKeyId; } + public Map getConfiguration() { + return configuration; + } + public Map getCustomScheduling() { return customScheduling; } @@ -493,6 +497,14 @@ public String getLanguage() { return language; } + public Instant getLastSeen() { + return lastSeen; + } + + public ConnectorSyncInfo getSyncInfo() { + return syncInfo; + } + public String getName() { return name; } @@ -509,8 +521,8 @@ public String getServiceType() { return serviceType; } - public Map getConfiguration() { - return configuration; + public ConnectorStatus getStatus() { + return status; } 
public Object getSyncCursor() { @@ -521,18 +533,6 @@ public boolean isSyncNow() { return syncNow; } - public ConnectorSyncInfo getSyncInfo() { - return syncInfo; - } - - public Instant getLastSeen() { - return lastSeen; - } - - public ConnectorStatus getStatus() { - return status; - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 624697edfcd85..4f4e9d234c813 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; @@ -214,12 +215,12 @@ public void updateConnectorConfiguration(UpdateConnectorConfigurationAction.Requ } /** - * Updates the {@link ConnectorFiltering} property of a {@link Connector}. + * Updates the error property of a {@link Connector}. * - * @param request Request for updating connector filtering property. - * @param listener Listener to respond to a successful response or an error. + * @param request The request for updating the connector's error. + * @param listener The listener for handling responses, including successful updates or errors. 
*/ - public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request request, ActionListener listener) { + public void updateConnectorError(UpdateConnectorErrorAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -244,12 +245,12 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ } /** - * Updates the lastSeen property of a {@link Connector}. + * Updates the name and/or description property of a {@link Connector}. * - * @param request The request for updating the connector's lastSeen status. + * @param request The request for updating the connector's name and/or description. * @param listener The listener for handling responses, including successful updates or errors. */ - public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + public void updateConnectorNameOrDescription(UpdateConnectorNameAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -274,12 +275,12 @@ public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request reques } /** - * Updates the {@link ConnectorSyncInfo} properties in a {@link Connector}. + * Updates the {@link ConnectorFiltering} property of a {@link Connector}. * - * @param request Request for updating connector last sync stats properties. + * @param request Request for updating connector filtering property. * @param listener Listener to respond to a successful response or an error. 
*/ - public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request request, ActionListener listener) { + public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -304,12 +305,42 @@ public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Requ } /** - * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. + * Updates the lastSeen property of a {@link Connector}. * - * @param request Request for updating connector ingest pipeline property. + * @param request The request for updating the connector's lastSeen status. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Updates the {@link ConnectorSyncInfo} properties in a {@link Connector}. + * + * @param request Request for updating connector last sync stats properties. 
* @param listener Listener to respond to a successful response or an error. */ - public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { + public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -334,12 +365,12 @@ public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request reques } /** - * Updates the {@link ConnectorScheduling} property of a {@link Connector}. + * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. * - * @param request The request for updating the connector's scheduling. - * @param listener The listener for handling responses, including successful updates or errors. + * @param request Request for updating connector ingest pipeline property. + * @param listener Listener to respond to a successful response or an error. */ - public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { + public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -364,12 +395,12 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } /** - * Updates the error property of a {@link Connector}. + * Updates the {@link ConnectorScheduling} property of a {@link Connector}. * - * @param request The request for updating the connector's error. + * @param request The request for updating the connector's scheduling. * @param listener The listener for handling responses, including successful updates or errors. 
*/ - public void updateConnectorError(UpdateConnectorErrorAction.Request request, ActionListener listener) { + public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java new file mode 100644 index 0000000000000..54ce2c9af79e8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorNameAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_name_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_name")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorNameAction.Request request = UpdateConnectorNameAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorNameAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorNameAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java new file mode 100644 index 0000000000000..252734aab1c51 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorNameAction extends HandledTransportAction< + UpdateConnectorNameAction.Request, + UpdateConnectorNameAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorNameAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorNameAction.NAME, + transportService, + actionFilters, + UpdateConnectorNameAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorNameAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorNameOrDescription( + request, + listener.map(r -> new UpdateConnectorNameAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java new file mode 100644 index 0000000000000..1db9bbe3aad9d 
--- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java @@ -0,0 +1,209 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class UpdateConnectorNameAction extends ActionType { + + public static final UpdateConnectorNameAction INSTANCE = new 
UpdateConnectorNameAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_name"; + + public UpdateConnectorNameAction() { + super(NAME, UpdateConnectorNameAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + @Nullable + private final String name; + + @Nullable + private final String description; + + public Request(String connectorId, String name, String description) { + this.connectorId = connectorId; + this.name = name; + this.description = description; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.name = in.readOptionalString(); + this.description = in.readOptionalString(); + } + + public String getConnectorId() { + return connectorId; + } + + public String getName() { + return name; + } + + @Override + public String getDescription() { + return description; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + if (Strings.isNullOrEmpty(name)) { + validationException = addValidationError("[name] cannot be null or empty.", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_update_name_request", + false, + ((args, connectorId) -> new UpdateConnectorNameAction.Request(connectorId, (String) args[0], (String) args[1])) + ); + + static { + PARSER.declareStringOrNull(constructorArg(), Connector.NAME_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), Connector.DESCRIPTION_FIELD); + } + + public static UpdateConnectorNameAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + 
XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorNameAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorNameAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (name != null) { + builder.field(Connector.NAME_FIELD.getPreferredName(), name); + } + if (description != null) { + builder.field(Connector.DESCRIPTION_FIELD.getPreferredName(), description); + } + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalString(name); + out.writeOptionalString(description); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) + && Objects.equals(name, request.name) + && Objects.equals(description, request.description); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, name, description); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + 
} + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index ffa532012d982..1960d14faeda2 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -215,6 +216,25 @@ public void testUpdateConnectorError() throws Exception { assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); } + public void 
testUpdateConnectorNameOrDescription() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + UpdateConnectorNameAction.Request updateNameDescriptionRequest = new UpdateConnectorNameAction.Request( + connector.getConnectorId(), + randomAlphaOfLengthBetween(5, 15), + randomAlphaOfLengthBetween(5, 15) + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorName(updateNameDescriptionRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + assertThat(updateNameDescriptionRequest.getName(), equalTo(indexedConnector.getName())); + assertThat(updateNameDescriptionRequest.getDescription(), equalTo(indexedConnector.getDescription())); + } + private DeleteResponse awaitDeleteConnector(String connectorId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -469,6 +489,31 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorName(UpdateConnectorNameAction.Request updatedNameOrDescription) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorNameOrDescription(updatedNameOrDescription, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update name request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + 
assertNotNull("Received null response from update name request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorError(UpdateConnectorErrorAction.Request updatedError) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -493,4 +538,5 @@ public void onFailure(Exception e) { assertNotNull("Received null response from update error request", resp.get()); return resp.get(); } + } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..7ee377a7933bf --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorNameActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorNameAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorNameAction.Request::new; + } + + @Override + protected UpdateConnectorNameAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorNameAction.Request(connectorId, randomAlphaOfLengthBetween(5, 15), randomAlphaOfLengthBetween(5, 15)); + } + + @Override + protected UpdateConnectorNameAction.Request mutateInstance(UpdateConnectorNameAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorNameAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorNameAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorNameAction.Request mutateInstanceForVersion( + UpdateConnectorNameAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..2297ccb565b5e --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorNameActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorNameAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorNameAction.Response::new; + } + + @Override + protected UpdateConnectorNameAction.Response createTestInstance() { + return new UpdateConnectorNameAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorNameAction.Response mutateInstance(UpdateConnectorNameAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorNameAction.Response mutateInstanceForVersion( + UpdateConnectorNameAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5c4fd44d77c9b..a432f28f71e54 100644 --- 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -132,6 +132,7 @@ public class Constants { "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", "cluster:admin/xpack/connector/update_last_sync_stats", + "cluster:admin/xpack/connector/update_name", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/connector/sync_job/cancel", From 96fcf04a121041ae2e69d44c4222e4a84354e60f Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 5 Dec 2023 11:46:49 +0000 Subject: [PATCH 155/181] AwaitsFix for #102974 --- .../search/aggregations/bucket/nested/NestedAggregatorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index fd848895e25f6..b71e10c34eef8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -502,6 +502,7 @@ public void testNestedOrdering() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testNestedOrdering_random() throws IOException { int numBooks = randomIntBetween(32, 512); List> books = new ArrayList<>(); From 2502af81980c8156f07f230d12549c7fcb3df7e5 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 5 Dec 2023 12:52:04 +0100 Subject: [PATCH 156/181] x-pack:apm-data module should mention it is for APM Server (#102866) apm-data module should be more explicit that it is for the apm 
server usage. It is confusing when starting up ES to see APM is disabled log line. Especially since we also have :modules:apm which is meant to be for sending apm metrics and traces This commit rephrases the log messages and rename APMPlugin class to mention APM Server. --- .../src/main/java/org/elasticsearch/telemetry/apm/APM.java | 5 +++++ x-pack/plugin/apm-data/README.md | 6 ++++-- .../xpack/apmdata/APMIndexTemplateRegistry.java | 4 ++-- .../java/org/elasticsearch/xpack/apmdata/APMPlugin.java | 2 +- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index bd751f95b2eef..979815f497583 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -8,6 +8,8 @@ package org.elasticsearch.telemetry.apm; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -44,6 +46,7 @@ * and applies the new settings values, provided those settings can be dynamically updated. */ public class APM extends Plugin implements NetworkPlugin, TelemetryPlugin { + private static final Logger logger = LogManager.getLogger(APM.class); private final SetOnce telemetryProvider = new SetOnce<>(); private final Settings settings; @@ -69,6 +72,8 @@ public Collection createComponents(PluginServices services) { apmAgentSettings.syncAgentSystemProperties(settings); final APMMeterService apmMeter = new APMMeterService(settings); apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get(), apmMeter); + logger.info("Sending apm metrics is {}", APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.get(settings) ? 
"enabled" : "disabled"); + logger.info("Sending apm traces is {}", APMAgentSettings.APM_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); return List.of(apmTracer, apmMeter); } diff --git a/x-pack/plugin/apm-data/README.md b/x-pack/plugin/apm-data/README.md index c4a0d97cb09c0..10892d767b536 100644 --- a/x-pack/plugin/apm-data/README.md +++ b/x-pack/plugin/apm-data/README.md @@ -1,6 +1,8 @@ -## APM Data plugin +NOTE: this plugin is not related to APM Metrics used in ES codebase. The APM Metrics are in :modules:apm -The APM data plugin installs index templates, component templates, and ingest pipelines for Elastic APM. +## APM Ingest plugin + +The APM Ingest plugin installs index templates, component templates, and ingest pipelines for Elastic APM Server. All resources are defined as YAML under [src/main/resources](src/main/resources). diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index 665ecc16a1e14..44621ee211838 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -136,7 +136,7 @@ private static ComponentTemplate loadComponentTemplate(String name, int version) final byte[] content = loadVersionedResourceUTF8("/component-templates/" + name + ".yaml", version); return ComponentTemplate.parse(YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)); } catch (Exception e) { - throw new RuntimeException("failed to load APM component template: " + name, e); + throw new RuntimeException("failed to load APM Ingest plugin's component template: " + name, e); } } @@ -145,7 +145,7 @@ private static ComposableIndexTemplate loadIndexTemplate(String name, int versio final byte[] content = 
loadVersionedResourceUTF8("/index-templates/" + name + ".yaml", version); return ComposableIndexTemplate.parse(YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)); } catch (Exception e) { - throw new RuntimeException("failed to load APM index template: " + name, e); + throw new RuntimeException("failed to load APM Ingest plugin's index template: " + name, e); } } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java index f3bf0a4eb800e..7acf3a3c972da 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java @@ -33,7 +33,7 @@ public Collection createComponents(PluginServices services) { ) ); APMIndexTemplateRegistry registryInstance = registry.get(); - logger.info("APM is {}", registryInstance.isEnabled() ? "enabled" : "disabled"); + logger.info("APM ingest plugin is {}", registryInstance.isEnabled() ? "enabled" : "disabled"); registryInstance.initialize(); return List.of(registryInstance); } From df097aae01d47655086ff7f854832590d68ba207 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 5 Dec 2023 12:53:21 +0100 Subject: [PATCH 157/181] [DOCS] Change ES|QL getting started guide for updated dataset (#102970) --- docs/reference/esql/esql-get-started.asciidoc | 4 ++-- .../esql-getting-started-sample-data.asciidoc | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 4109d9d6f4ba3..b0b68d281809e 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -158,8 +158,8 @@ is different than this example, where the sorting comes before the limit. 
Use the <> command to append columns to a table, with calculated values. For example, the following query appends a `duration_ms` column. The -values in the column are computed by dividing `event.duration` by 1,000,000. In -other words: `event.duration` converted from nanoseconds to milliseconds. +values in the column are computed by dividing `event_duration` by 1,000,000. In +other words: `event_duration` converted from nanoseconds to milliseconds. [source,esql] ---- diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc index 434954d8d400a..2a899a9f1ea33 100644 --- a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc +++ b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc @@ -1,7 +1,7 @@ // tag::own-deployment[] First ingest some sample data. In {kib}, open the main menu and select *Dev -Tools*. Run the the following two requests: +Tools*. 
Run the following two requests: [source,console] ---- @@ -9,7 +9,7 @@ PUT sample_data { "mappings": { "properties": { - "client.ip": { + "client_ip": { "type": "ip" }, "message": { @@ -21,19 +21,19 @@ PUT sample_data PUT sample_data/_bulk {"index": {}} -{"@timestamp": "2023-10-23T12:15:03.360Z", "client.ip": "172.21.2.162", "message": "Connected to 10.1.0.3", "event.duration": 3450233} +{"@timestamp": "2023-10-23T12:15:03.360Z", "client_ip": "172.21.2.162", "message": "Connected to 10.1.0.3", "event_duration": 3450233} {"index": {}} -{"@timestamp": "2023-10-23T12:27:28.948Z", "client.ip": "172.21.2.113", "message": "Connected to 10.1.0.2", "event.duration": 2764889} +{"@timestamp": "2023-10-23T12:27:28.948Z", "client_ip": "172.21.2.113", "message": "Connected to 10.1.0.2", "event_duration": 2764889} {"index": {}} -{"@timestamp": "2023-10-23T13:33:34.937Z", "client.ip": "172.21.0.5", "message": "Disconnected", "event.duration": 1232382} +{"@timestamp": "2023-10-23T13:33:34.937Z", "client_ip": "172.21.0.5", "message": "Disconnected", "event_duration": 1232382} {"index": {}} -{"@timestamp": "2023-10-23T13:51:54.732Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 725448} +{"@timestamp": "2023-10-23T13:51:54.732Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 725448} {"index": {}} -{"@timestamp": "2023-10-23T13:52:55.015Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 8268153} +{"@timestamp": "2023-10-23T13:52:55.015Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 8268153} {"index": {}} -{"@timestamp": "2023-10-23T13:53:55.832Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 5033755} +{"@timestamp": "2023-10-23T13:53:55.832Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 5033755} {"index": {}} -{"@timestamp": "2023-10-23T13:55:01.543Z", "client.ip": "172.21.3.15", "message": 
"Connected to 10.1.0.1", "event.duration": 1756467} +{"@timestamp": "2023-10-23T13:55:01.543Z", "client_ip": "172.21.3.15", "message": "Connected to 10.1.0.1", "event_duration": 1756467} ---- // end::own-deployment[] From ea6013ca9c1fcf6fa6ba76d988ce4150cb381377 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 5 Dec 2023 07:33:32 -0500 Subject: [PATCH 158/181] Test mute for issue #102974 (#102975) related https://github.com/elastic/elasticsearch/issues/102974 --- .../search/aggregations/bucket/nested/NestedAggregatorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index b71e10c34eef8..83a2e856a512e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -563,6 +563,7 @@ public void testNestedOrdering_random() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testPreGetChildLeafCollectors() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { From 8d4677e011c1ce68d97704da92cdca1e67b10f78 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Tue, 5 Dec 2023 14:16:35 +0100 Subject: [PATCH 159/181] Add AutoscalingMissedIndicesUpdateException (#102817) Add an exception for the case where we can't update autoscaling index metric stats due to a missed index. 
--- .../elasticsearch/ElasticsearchException.java | 7 ++++++ .../org/elasticsearch/TransportVersions.java | 1 + ...toscalingMissedIndicesUpdateException.java | 24 +++++++++++++++++++ .../ExceptionSerializationTests.java | 2 ++ 4 files changed, 34 insertions(+) create mode 100644 server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 5c5133e478ee1..50a5f7420847b 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.AutoscalingMissedIndicesUpdateException; import org.elasticsearch.indices.recovery.RecoveryCommitTooNewException; import org.elasticsearch.rest.ApiNotAvailableException; import org.elasticsearch.rest.RestStatus; @@ -1863,6 +1864,12 @@ private enum ElasticsearchExceptionHandle { AggregationExecutionException.InvalidPath::new, 174, TransportVersions.INVALID_BUCKET_PATH_EXCEPTION_INTRODUCED + ), + MISSED_INDICES_UPDATE_EXCEPTION( + AutoscalingMissedIndicesUpdateException.class, + AutoscalingMissedIndicesUpdateException::new, + 175, + TransportVersions.MISSED_INDICES_UPDATE_EXCEPTION_ADDED ); final Class exceptionClass; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 30769371f3608..369e1da237aa0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -188,6 +188,7 @@ static TransportVersion def(int id) { public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_555_00_0); public static 
final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); + public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java b/server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java new file mode 100644 index 0000000000000..e32af622b5531 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.indices; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; + +public class AutoscalingMissedIndicesUpdateException extends ElasticsearchException { + + public AutoscalingMissedIndicesUpdateException(String message) { + super(message); + } + + public AutoscalingMissedIndicesUpdateException(StreamInput in) throws IOException { + super(in); + } +} diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index f7362c7001c36..3e0d9193ffed9 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotInPrimaryModeException; +import org.elasticsearch.indices.AutoscalingMissedIndicesUpdateException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.indices.recovery.PeerRecoveryNotFound; @@ -825,6 +826,7 @@ public void testIds() { ids.put(172, RecoveryCommitTooNewException.class); ids.put(173, TooManyScrollContextsException.class); ids.put(174, AggregationExecutionException.InvalidPath.class); + ids.put(175, AutoscalingMissedIndicesUpdateException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { From 2703b802c8bba13eddaa4a01ac2ace3d55458863 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 5 Dec 2023 14:29:21 +0100 Subject: [PATCH 160/181] Add cluster def link to CONTRIBUTING.md (#102979) --- CONTRIBUTING.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index db8cca17a5606..5b68fd807220e 100644 
--- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -138,6 +138,16 @@ and then run `curl` in another window like this: curl -u elastic:password localhost:9200 +To send requests to this Elasticsearch instance, either use the built-in `elastic` +user and password as above or use the pre-configured `elastic-admin` user: + + curl -u elastic-admin:elastic-password localhost:9200 + +Security can also be disabled altogether: + + ./gradlew :run -Dtests.es.xpack.security.enabled=false + +The definition of this Elasticsearch cluster can be found [here](build-tools-internal/src/main/groovy/elasticsearch.run.gradle). ### Importing the project into IntelliJ IDEA From 3ace42266821f9baf1823ba4f5fe1a0f52935149 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 5 Dec 2023 14:37:52 +0100 Subject: [PATCH 161/181] [DOCS] More ES|QL getting started updates (#102980) * [DOCS] More ES|QL getting started updates * Change 'server.ip' into 'server_ip' --- docs/reference/esql/esql-get-started.asciidoc | 6 +++--- .../esql-getting-started-enrich-policy.asciidoc | 14 +++++++------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index b0b68d281809e..6e467e1e7312d 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -298,10 +298,10 @@ string, you can use the following `DISSECT` command: include::{esql-specs}/dissect.csv-spec[tag=gs-dissect] ---- -This adds a `server.ip` column to those rows that have a `message` that matches -this pattern. For other rows, the value of `server.ip` is `null`. +This adds a `server_ip` column to those rows that have a `message` that matches +this pattern. For other rows, the value of `server_ip` is `null`. -You can use the new `server.ip` column that's added by the `DISSECT` command in +You can use the new `server_ip` column that's added by the `DISSECT` command in subsequent commands. 
For example, to determine how many connections each server has accepted: diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc index a1898dffda684..c51a46bdef3b3 100644 --- a/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc +++ b/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc @@ -10,7 +10,7 @@ PUT clientips { "mappings": { "properties": { - "client.ip": { + "client_ip": { "type": "keyword" }, "env": { @@ -22,21 +22,21 @@ PUT clientips PUT clientips/_bulk { "index" : {}} -{ "client.ip": "172.21.0.5", "env": "Development" } +{ "client_ip": "172.21.0.5", "env": "Development" } { "index" : {}} -{ "client.ip": "172.21.2.113", "env": "QA" } +{ "client_ip": "172.21.2.113", "env": "QA" } { "index" : {}} -{ "client.ip": "172.21.2.162", "env": "QA" } +{ "client_ip": "172.21.2.162", "env": "QA" } { "index" : {}} -{ "client.ip": "172.21.3.15", "env": "Production" } +{ "client_ip": "172.21.3.15", "env": "Production" } { "index" : {}} -{ "client.ip": "172.21.3.16", "env": "Production" } +{ "client_ip": "172.21.3.16", "env": "Production" } PUT /_enrich/policy/clientip_policy { "match": { "indices": "clientips", - "match_field": "client.ip", + "match_field": "client_ip", "enrich_fields": ["env"] } } From 903193c97af955e263ecd5a898286d1fa416643b Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 5 Dec 2023 15:08:56 +0100 Subject: [PATCH 162/181] ES|QL: Better management of allowed errors in generative tests (#102969) --- .../qa/rest/generative/GenerativeRestTest.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 
9ba54ea1941fd..c341ad26cb7a6 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -17,6 +17,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.regex.Pattern; import java.util.stream.Collectors; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; @@ -29,10 +30,15 @@ public abstract class GenerativeRestTest extends ESRestTestCase { public static final int MAX_DEPTH = 10; public static final Set ALLOWED_ERRORS = Set.of( - "is ambiguous (to disambiguate use quotes or qualifiers)", - "due to ambiguities being mapped as" + "Reference \\[.*\\] is ambiguous", + "Cannot use field \\[.*\\] due to ambiguities" ); + public static final Set ALLOWED_ERROR_PATTERNS = ALLOWED_ERRORS.stream() + .map(x -> ".*" + x + ".*") + .map(x -> Pattern.compile(x, Pattern.DOTALL)) + .collect(Collectors.toSet()); + @Before public void setup() throws IOException { if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { @@ -65,8 +71,8 @@ public void test() { } private void checkException(EsqlQueryGenerator.QueryExecuted query) { - for (String allowedError : ALLOWED_ERRORS) { - if (query.exception().getMessage().contains(allowedError)) { + for (Pattern allowedError : ALLOWED_ERROR_PATTERNS) { + if (allowedError.matcher(query.exception().getMessage()).matches()) { return; } } From 0c946ff64d16fca265668c34fd21ba4670b0ba18 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 5 Dec 2023 15:13:18 +0100 Subject: [PATCH 163/181] [Connectors API] Post connector endpoint (#102959) --- .../rest-api-spec/api/connector.post.json | 33 +++ .../elastic-connectors-mappings.json | 3 - .../test/entsearch/300_connector_put.yml | 6 +- .../test/entsearch/305_connector_post.yml | 78 ++++++ .../test/entsearch/310_connector_list.yml | 20 +- 
.../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 100 ++++---- .../connector/ConnectorIndexService.java | 43 +++- .../connector/action/GetConnectorAction.java | 8 +- .../connector/action/PostConnectorAction.java | 242 ++++++++++++++++++ .../action/RestPostConnectorAction.java | 45 ++++ .../action/TransportPostConnectorAction.java | 60 +++++ .../action/TransportPutConnectorAction.java | 9 +- .../syncjob/ConnectorSyncJobIndexService.java | 4 +- .../connector/ConnectorIndexServiceTests.java | 115 ++++++--- .../connector/ConnectorTestUtils.java | 16 +- .../application/connector/ConnectorTests.java | 6 +- ...ctorActionResponseBWCSerializingTests.java | 2 +- ...ectorActionRequestBWCSerializingTests.java | 44 ++++ ...ctorActionResponseBWCSerializingTests.java | 36 +++ .../syncjob/ConnectorSyncJobTests.java | 4 +- .../xpack/security/operator/Constants.java | 1 + 22 files changed, 757 insertions(+), 123 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json new file mode 100644 index 0000000000000..2dfaf150c455a --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json @@ -0,0 +1,33 @@ +{ + "connector.post": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Creates a connector." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector", + "methods": [ + "POST" + ] + } + ] + }, + "body": { + "description": "The connector configuration.", + "required": false + } + } +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json index 2a41662a136a7..651e1c84da73a 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json @@ -18,9 +18,6 @@ "api_key_id": { "type": "keyword" }, - "connector_id": { - "type": "keyword" - }, "configuration": { "type": "object" }, diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml index 464b64a2b24a3..77d4f28721525 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml @@ -22,7 +22,7 @@ setup: connector.get: connector_id: test-connector - - match: { connector_id: test-connector } + - match: { id: test-connector } - match: { index_name: search-test } - match: { name: my-connector } - match: { language: pl } @@ -43,7 +43,7 @@ setup: connector.get: connector_id: test-connector-with-defaults - - match: { connector_id: test-connector-with-defaults } + - match: { id: test-connector-with-defaults } - match: { index_name: search-test } - match: { is_native: false } - match: { sync_now: false } @@ -67,7 +67,7 @@ setup: connector.get: connector_id: test-connector-native - - match: { connector_id: test-connector-native } + - match: { id: test-connector-native } - match: { index_name: search-test } - match: { is_native: true } - match: { sync_now: false } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml new file mode 100644 index 0000000000000..8d0fa14311608 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml @@ -0,0 +1,78 @@ + +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + +--- +'Create Connector': + - do: + connector.post: + body: + index_name: search-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + + - set: { id: id } + - match: { id: $id } + + - do: + connector.get: + connector_id: $id + + - match: { id: $id } + - match: { index_name: search-test } + - match: { name: my-connector } + - match: { language: pl } + - match: { is_native: false } + - match: { service_type: super-connector } + +--- +'Create Connector - Default values are initialized correctly': + - do: + connector.post: + body: + 
index_name: search-test + + - set: { id: id } + - match: { id: $id } + + - do: + connector.get: + connector_id: $id + + - match: { id: $id } + - match: { index_name: search-test } + - match: { is_native: false } + - match: { sync_now: false } + - match: { status: created } + - match: { configuration: {} } + - match: { custom_scheduling: {} } + - match: { filtering.0.domain: DEFAULT } + +--- +'Create Connector - Native connector is initialized correctly': + - do: + connector.post: + body: + index_name: search-test + is_native: true + + - set: { id: id } + - match: { id: $id } + + - do: + connector.get: + connector_id: $id + + - match: { id: $id } + - match: { index_name: search-test } + - match: { is_native: true } + - match: { sync_now: false } + - match: { status: needs_configuration } + - match: { configuration: {} } + - match: { custom_scheduling: {} } + - match: { filtering.0.domain: DEFAULT } + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml index 36cd1c283f7e8..52cfcdee0bb85 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml @@ -39,16 +39,16 @@ setup: - match: { count: 3 } - # Alphabetical order by connector_id for results - - match: { results.0.connector_id: "connector-a" } + # Alphabetical order by index_name for results + - match: { results.0.id: "connector-a" } - match: { results.0.index_name: "search-1-test" } - match: { results.0.language: "pl" } - - match: { results.1.connector_id: "connector-b" } + - match: { results.1.id: "connector-b" } - match: { results.1.index_name: "search-2-test" } - match: { results.1.language: "en" } - - match: { results.2.connector_id: "connector-c" 
} + - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } - match: { results.2.language: "nl" } @@ -61,12 +61,12 @@ setup: - match: { count: 3 } - # Alphabetical order by connector_id for results - - match: { results.0.connector_id: "connector-b" } + # Alphabetical order by index_name for results + - match: { results.0.id: "connector-b" } - match: { results.0.index_name: "search-2-test" } - match: { results.0.language: "en" } - - match: { results.1.connector_id: "connector-c" } + - match: { results.1.id: "connector-c" } - match: { results.1.index_name: "search-3-test" } - match: { results.1.language: "nl" } @@ -78,12 +78,12 @@ setup: - match: { count: 3 } - # Alphabetical order by connector_id for results - - match: { results.0.connector_id: "connector-a" } + # Alphabetical order by index_name for results + - match: { results.0.id: "connector-a" } - match: { results.0.index_name: "search-1-test" } - match: { results.0.language: "pl" } - - match: { results.1.connector_id: "connector-b" } + - match: { results.1.id: "connector-b" } - match: { results.1.index_name: "search-2-test" } - match: { results.1.language: "en" } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 12bd2f4a25bdd..2119d9d0a4c30 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -45,10 +45,12 @@ import org.elasticsearch.xpack.application.connector.action.DeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.GetConnectorAction; import org.elasticsearch.xpack.application.connector.action.ListConnectorAction; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import 
org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestPostConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; @@ -61,6 +63,7 @@ import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportPostConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; @@ -218,6 +221,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(DeleteConnectorAction.INSTANCE, TransportDeleteConnectorAction.class), new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), + new ActionHandler<>(PostConnectorAction.INSTANCE, TransportPostConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), new ActionHandler<>(UpdateConnectorConfigurationAction.INSTANCE, TransportUpdateConnectorConfigurationAction.class), new 
ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), @@ -293,6 +297,7 @@ public List getRestHandlers( new RestDeleteConnectorAction(), new RestGetConnectorAction(), new RestListConnectorAction(), + new RestPostConnectorAction(), new RestPutConnectorAction(), new RestUpdateConnectorConfigurationAction(), new RestUpdateConnectorErrorAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 8c0c150ea88af..bcb182774e758 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -32,7 +32,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** @@ -107,7 +106,7 @@ public class Connector implements NamedWriteable, ToXContentObject { /** * Constructor for Connector. * - * @param connectorId Unique identifier for the connector. + * @param connectorId Unique identifier for the connector. Used when building get/list response. Equals to doc _id. * @param apiKeyId API key ID used for authentication/authorization against ES. * @param configuration Configuration settings for the connector. * @param customScheduling Custom scheduling settings for the connector. 
@@ -150,7 +149,7 @@ private Connector( Object syncCursor, boolean syncNow ) { - this.connectorId = Objects.requireNonNull(connectorId, "connectorId cannot be null"); + this.connectorId = connectorId; this.apiKeyId = apiKeyId; this.configuration = configuration; this.customScheduling = customScheduling; @@ -173,7 +172,7 @@ private Connector( } public Connector(StreamInput in) throws IOException { - this.connectorId = in.readString(); + this.connectorId = in.readOptionalString(); this.apiKeyId = in.readOptionalString(); this.configuration = in.readMap(ConnectorConfiguration::new); this.customScheduling = in.readMap(ConnectorCustomSchedule::new); @@ -195,7 +194,7 @@ public Connector(StreamInput in) throws IOException { this.syncNow = in.readBoolean(); } - public static final ParseField ID_FIELD = new ParseField("connector_id"); + public static final ParseField ID_FIELD = new ParseField("id"); static final ParseField API_KEY_ID_FIELD = new ParseField("api_key_id"); public static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); @@ -216,45 +215,48 @@ public Connector(StreamInput in) throws IOException { static final ParseField SYNC_NOW_FIELD = new ParseField("sync_now"); @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("connector", true, (args) -> { - int i = 0; - return new Builder().setConnectorId((String) args[i++]) - .setApiKeyId((String) args[i++]) - .setConfiguration((Map) args[i++]) - .setCustomScheduling((Map) args[i++]) - .setDescription((String) args[i++]) - .setError((String) args[i++]) - .setFeatures((ConnectorFeatures) args[i++]) - .setFiltering((List) args[i++]) - .setIndexName((String) args[i++]) - .setIsNative((Boolean) args[i++]) - .setLanguage((String) args[i++]) - .setLastSeen((Instant) args[i++]) - .setSyncInfo( - new 
ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) - .setLastAccessControlSyncScheduledAt((Instant) args[i++]) - .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastDeletedDocumentCount((Long) args[i++]) - .setLastIncrementalSyncScheduledAt((Instant) args[i++]) - .setLastIndexedDocumentCount((Long) args[i++]) - .setLastSyncError((String) args[i++]) - .setLastSyncScheduledAt((Instant) args[i++]) - .setLastSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastSynced((Instant) args[i++]) - .build() - ) - .setName((String) args[i++]) - .setPipeline((ConnectorIngestPipeline) args[i++]) - .setScheduling((ConnectorScheduling) args[i++]) - .setServiceType((String) args[i++]) - .setStatus((ConnectorStatus) args[i++]) - .setSyncCursor(args[i++]) - .setSyncNow((Boolean) args[i]) - .build(); - }); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector", + true, + (args, docId) -> { + int i = 0; + return new Builder().setConnectorId(docId) + .setApiKeyId((String) args[i++]) + .setConfiguration((Map) args[i++]) + .setCustomScheduling((Map) args[i++]) + .setDescription((String) args[i++]) + .setError((String) args[i++]) + .setFeatures((ConnectorFeatures) args[i++]) + .setFiltering((List) args[i++]) + .setIndexName((String) args[i++]) + .setIsNative((Boolean) args[i++]) + .setLanguage((String) args[i++]) + .setLastSeen((Instant) args[i++]) + .setSyncInfo( + new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) + .setLastAccessControlSyncScheduledAt((Instant) args[i++]) + .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastDeletedDocumentCount((Long) args[i++]) + .setLastIncrementalSyncScheduledAt((Instant) args[i++]) + .setLastIndexedDocumentCount((Long) args[i++]) + .setLastSyncError((String) args[i++]) + .setLastSyncScheduledAt((Instant) args[i++]) + .setLastSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastSynced((Instant) 
args[i++]) + .build() + ) + .setName((String) args[i++]) + .setPipeline((ConnectorIngestPipeline) args[i++]) + .setScheduling((ConnectorScheduling) args[i++]) + .setServiceType((String) args[i++]) + .setStatus((ConnectorStatus) args[i++]) + .setSyncCursor(args[i++]) + .setSyncNow((Boolean) args[i]) + .build(); + } + ); static { - PARSER.declareString(constructorArg(), ID_FIELD); PARSER.declareString(optionalConstructorArg(), API_KEY_ID_FIELD); PARSER.declareField( optionalConstructorArg(), @@ -357,23 +359,25 @@ public Connector(StreamInput in) throws IOException { PARSER.declareBoolean(optionalConstructorArg(), SYNC_NOW_FIELD); } - public static Connector fromXContentBytes(BytesReference source, XContentType xContentType) { + public static Connector fromXContentBytes(BytesReference source, String docId, XContentType xContentType) { try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { - return Connector.fromXContent(parser); + return Connector.fromXContent(parser, docId); } catch (IOException e) { throw new ElasticsearchParseException("Failed to parse a connector document.", e); } } - public static Connector fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); + public static Connector fromXContent(XContentParser parser, String docId) throws IOException { + return PARSER.parse(parser, docId); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(ID_FIELD.getPreferredName(), connectorId); + if (connectorId != null) { + builder.field(ID_FIELD.getPreferredName(), connectorId); + } if (apiKeyId != null) { builder.field(API_KEY_ID_FIELD.getPreferredName(), apiKeyId); } @@ -431,7 +435,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(connectorId); + 
out.writeOptionalString(connectorId); out.writeOptionalString(apiKeyId); out.writeMap(configuration, StreamOutput::writeWriteable); out.writeMap(customScheduling, StreamOutput::writeWriteable); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 4f4e9d234c813..c3afa6a8b31f6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; @@ -68,13 +69,14 @@ public ConnectorIndexService(Client client) { /** * Creates or updates the {@link Connector} in the underlying index. * + * @param docId The ID of the connector. * @param connector The connector object. * @param listener The action listener to invoke on response/failure. 
*/ - public void putConnector(Connector connector, ActionListener listener) { + public void putConnector(String docId, Connector connector, ActionListener listener) { try { final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(connector.getConnectorId()) + .id(docId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); clientWithOrigin.index(indexRequest, listener); @@ -83,6 +85,31 @@ public void putConnector(Connector connector, ActionListener l } } + /** + * Creates or updates the {@link Connector} in the underlying index. + * Assigns connector an auto-generated doc ID. + * + * @param connector The connector object. + * @param listener The action listener to invoke on response/failure. + */ + public void postConnector(Connector connector, ActionListener listener) { + try { + final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + + clientWithOrigin.index( + indexRequest, + ActionListener.wrap( + indexResponse -> listener.onResponse(new PostConnectorAction.Response(indexResponse.getId())), + listener::onFailure + ) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Gets the {@link Connector} from the underlying index. 
* @@ -99,7 +126,11 @@ public void getConnector(String connectorId, ActionListener listener) return; } try { - final Connector connector = Connector.fromXContentBytes(getResponse.getSourceAsBytesRef(), XContentType.JSON); + final Connector connector = Connector.fromXContentBytes( + getResponse.getSourceAsBytesRef(), + connectorId, + XContentType.JSON + ); l.onResponse(connector); } catch (Exception e) { listener.onFailure(e); @@ -139,7 +170,7 @@ public void deleteConnector(String connectorId, ActionListener l } /** - * List the {@link Connector} in ascending order of their ids. + * List the {@link Connector} in ascending order of their index names. * * @param from From index to start the search from. * @param size The maximum number of {@link Connector}s to return. @@ -151,7 +182,7 @@ public void listConnectors(int from, int size, ActionListener() { @Override @@ -435,7 +466,7 @@ private static Connector hitToConnector(SearchHit searchHit) { // todo: don't return sensitive data from configuration in list endpoint - return Connector.fromXContentBytes(searchHit.getSourceRef(), XContentType.JSON); + return Connector.fromXContentBytes(searchHit.getSourceRef(), searchHit.getId(), XContentType.JSON); } public record ConnectorResult(List connectors, long totalResults) {} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java index 61d5947489322..9d97b6787c243 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java @@ -88,7 +88,9 @@ public int hashCode() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - 
builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + { + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + } builder.endObject(); return builder; } @@ -131,8 +133,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return connector.toXContent(builder, params); } - public static GetConnectorAction.Response fromXContent(XContentParser parser) throws IOException { - return new GetConnectorAction.Response(Connector.fromXContent(parser)); + public static GetConnectorAction.Response fromXContent(XContentParser parser, String docId) throws IOException { + return new GetConnectorAction.Response(Connector.fromXContent(parser, docId)); } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java new file mode 100644 index 0000000000000..6570b111d8a0e --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java @@ -0,0 +1,242 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class PostConnectorAction extends ActionType { + + public static final PostConnectorAction INSTANCE = new PostConnectorAction(); + public static final String NAME = "cluster:admin/xpack/connector/post"; + + public PostConnectorAction() { + super(NAME, PostConnectorAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + @Nullable + private final String description; + @Nullable + private final String indexName; + @Nullable + private final Boolean isNative; + @Nullable + private final String language; + @Nullable + private final String name; + @Nullable + private final String serviceType; + + public Request(String description, String indexName, Boolean isNative, String language, String name, String serviceType) { + 
this.description = description; + this.indexName = indexName; + this.isNative = isNative; + this.language = language; + this.name = name; + this.serviceType = serviceType; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.description = in.readOptionalString(); + this.indexName = in.readOptionalString(); + this.isNative = in.readOptionalBoolean(); + this.language = in.readOptionalString(); + this.name = in.readOptionalString(); + this.serviceType = in.readOptionalString(); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_put_request", + false, + (args) -> new Request( + (String) args[0], + (String) args[1], + (Boolean) args[2], + (String) args[3], + (String) args[4], + (String) args[5] + ) + ); + + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("description")); + PARSER.declareString(optionalConstructorArg(), new ParseField("index_name")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("is_native")); + PARSER.declareString(optionalConstructorArg(), new ParseField("language")); + PARSER.declareString(optionalConstructorArg(), new ParseField("name")); + PARSER.declareString(optionalConstructorArg(), new ParseField("service_type")); + } + + public static Request fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return Request.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static Request fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (description != null) { + builder.field("description", 
description); + } + if (indexName != null) { + builder.field("index_name", indexName); + } + if (isNative != null) { + builder.field("is_native", isNative); + } + if (language != null) { + builder.field("language", language); + } + if (name != null) { + builder.field("name", name); + } + if (serviceType != null) { + builder.field("service_type", serviceType); + } + } + builder.endObject(); + return builder; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(description); + out.writeOptionalString(indexName); + out.writeOptionalBoolean(isNative); + out.writeOptionalString(language); + out.writeOptionalString(name); + out.writeOptionalString(serviceType); + } + + public String getDescription() { + return description; + } + + public String getIndexName() { + return indexName; + } + + public Boolean getIsNative() { + return isNative; + } + + public String getLanguage() { + return language; + } + + public String getName() { + return name; + } + + public String getServiceType() { + return serviceType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(description, request.description) + && Objects.equals(indexName, request.indexName) + && Objects.equals(isNative, request.isNative) + && Objects.equals(language, request.language) + && Objects.equals(name, request.name) + && Objects.equals(serviceType, request.serviceType); + } + + @Override + public int hashCode() { + return Objects.hash(description, indexName, isNative, language, name, serviceType); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final String id; + + public Response(StreamInput in) throws IOException { + super(in); + this.id = 
in.readString(); + } + + public Response(String id) { + this.id = id; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + } + + public String getId() { + return id; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.ID_FIELD.getPreferredName(), id); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(id, response.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java new file mode 100644 index 0000000000000..9bfa3fd629567 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestPostConnectorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_post_action"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + PostConnectorAction.Request request = PostConnectorAction.Request.fromXContentBytes( + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + PostConnectorAction.INSTANCE, + request, + new RestToXContentListener<>(channel, r -> RestStatus.CREATED, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java new file mode 100644 index 0000000000000..7b66ca81a77f9 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +import java.util.Objects; + +public class TransportPostConnectorAction extends HandledTransportAction { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportPostConnectorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + PostConnectorAction.NAME, + transportService, + actionFilters, + PostConnectorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute(Task task, PostConnectorAction.Request request, ActionListener listener) { + + Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); + + Connector connector = new Connector.Builder().setDescription(request.getDescription()) + .setIndexName(request.getIndexName()) + .setIsNative(isNative) + .setLanguage(request.getLanguage()) + .setName(request.getName()) + .setServiceType(request.getServiceType()) + .build(); + + connectorIndexService.postConnector(connector, listener); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java index 013a8f4a8334d..8f4ac53b03bbd 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java @@ -47,8 +47,7 @@ protected void doExecute(Task task, PutConnectorAction.Request request, ActionLi Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); - Connector connector = new Connector.Builder().setConnectorId(request.getConnectorId()) - .setDescription(request.getDescription()) + Connector connector = new Connector.Builder().setDescription(request.getDescription()) .setIndexName(request.getIndexName()) .setIsNative(isNative) .setLanguage(request.getLanguage()) @@ -56,6 +55,10 @@ protected void doExecute(Task task, PutConnectorAction.Request request, ActionLi .setServiceType(request.getServiceType()) .build(); - connectorIndexService.putConnector(connector, listener.map(r -> new PutConnectorAction.Response(r.getResult()))); + connectorIndexService.putConnector( + request.getConnectorId(), + connector, + listener.map(r -> new PutConnectorAction.Response(r.getResult())) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 9bcd03eb21ca9..2c9ac7c06b91c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -330,9 +330,7 @@ public void onResponse(GetResponse response) { Map 
source = response.getSource(); @SuppressWarnings("unchecked") - final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId( - (String) source.get(Connector.ID_FIELD.getPreferredName()) - ) + final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId(connectorId) .setFiltering((List) source.get(Connector.FILTERING_FIELD.getPreferredName())) .setIndexName((String) source.get(Connector.INDEX_NAME_FIELD.getPreferredName())) .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 1960d14faeda2..eedfea13c671b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; @@ -49,8 +50,20 @@ public void setup() { public void testPutConnector() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), 
equalTo(RestStatus.OK))); + + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(connectorId, equalTo(indexedConnector.getConnectorId())); + } + + public void testPostConnector() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + PostConnectorAction.Response resp = awaitPostConnector(connector); + + Connector indexedConnector = awaitGetConnector(resp.getId()); + assertThat(resp.getId(), equalTo(indexedConnector.getConnectorId())); } public void testDeleteConnector() throws Exception { @@ -58,9 +71,8 @@ public void testDeleteConnector() throws Exception { List connectorIds = new ArrayList<>(); for (int i = 0; i < numConnectors; i++) { Connector connector = ConnectorTestUtils.getRandomConnector(); - connectorIds.add(connector.getConnectorId()); - DocWriteResponse resp = awaitPutConnector(connector); - assertThat(resp.status(), equalTo(RestStatus.CREATED)); + PostConnectorAction.Response resp = awaitPostConnector(connector); + connectorIds.add(resp.getId()); } String connectorIdToDelete = connectorIds.get(0); @@ -73,8 +85,8 @@ public void testDeleteConnector() throws Exception { public void testUpdateConnectorConfiguration() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); Map connectorConfiguration = connector.getConfiguration() @@ -83,20 +95,21 @@ public void testUpdateConnectorConfiguration() throws Exception { .collect(Collectors.toMap(Map.Entry::getKey, entry -> ConnectorTestUtils.getRandomConnectorConfigurationField())); UpdateConnectorConfigurationAction.Request updateConfigurationRequest = new UpdateConnectorConfigurationAction.Request( - connector.getConnectorId(), + connectorId, connectorConfiguration ); DocWriteResponse 
updateResponse = awaitUpdateConnectorConfiguration(updateConfigurationRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(connectorConfiguration, equalTo(indexedConnector.getConfiguration())); assertThat(indexedConnector.getStatus(), equalTo(ConnectorStatus.CONFIGURED)); } public void testUpdateConnectorPipeline() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorIngestPipeline updatedPipeline = new ConnectorIngestPipeline.Builder().setName("test-pipeline") @@ -106,20 +119,21 @@ public void testUpdateConnectorPipeline() throws Exception { .build(); UpdateConnectorPipelineAction.Request updatePipelineRequest = new UpdateConnectorPipelineAction.Request( - connector.getConnectorId(), + connectorId, updatedPipeline ); DocWriteResponse updateResponse = awaitUpdateConnectorPipeline(updatePipelineRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updatedPipeline, equalTo(indexedConnector.getPipeline())); } public void testUpdateConnectorFiltering() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connector); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); List filteringList = IntStream.range(0, 10) @@ -127,33 +141,35 @@ public void 
testUpdateConnectorFiltering() throws Exception { .collect(Collectors.toList()); UpdateConnectorFilteringAction.Request updateFilteringRequest = new UpdateConnectorFilteringAction.Request( - connector.getConnectorId(), + connectorId, filteringList ); DocWriteResponse updateResponse = awaitUpdateConnectorFiltering(updateFilteringRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(filteringList, equalTo(indexedConnector.getFiltering())); } public void testUpdateConnectorLastSeen() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); - UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connectorId); DocWriteResponse updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnectorTime1 = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnectorTime1 = awaitGetConnector(connectorId); assertNotNull(indexedConnectorTime1.getLastSeen()); - checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + checkInRequest = new UpdateConnectorLastSeenAction.Request(connectorId); updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnectorTime2 = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnectorTime2 = 
awaitGetConnector(connectorId); assertNotNull(indexedConnectorTime2.getLastSeen()); assertTrue(indexedConnectorTime2.getLastSeen().isAfter(indexedConnectorTime1.getLastSeen())); @@ -161,68 +177,70 @@ public void testUpdateConnectorLastSeen() throws Exception { public void testUpdateConnectorLastSyncStats() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connector); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); - UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request( - connector.getConnectorId(), - syncStats - ); + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, syncStats); DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(syncStats, equalTo(indexedConnector.getSyncInfo())); } public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorScheduling updatedScheduling = ConnectorTestUtils.getRandomConnectorScheduling(); UpdateConnectorSchedulingAction.Request updateSchedulingRequest = new UpdateConnectorSchedulingAction.Request( - connector.getConnectorId(), + connectorId, 
updatedScheduling ); DocWriteResponse updateResponse = awaitUpdateConnectorScheduling(updateSchedulingRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling())); } public void testUpdateConnectorError() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request( - connector.getConnectorId(), + connectorId, randomAlphaOfLengthBetween(5, 15) ); DocWriteResponse updateResponse = awaitUpdateConnectorError(updateErrorRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); } public void testUpdateConnectorNameOrDescription() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorNameAction.Request updateNameDescriptionRequest = new UpdateConnectorNameAction.Request( - connector.getConnectorId(), + connectorId, randomAlphaOfLengthBetween(5, 15), randomAlphaOfLengthBetween(5, 15) ); @@ -230,7 +248,7 @@ public void testUpdateConnectorNameOrDescription() throws Exception { 
DocWriteResponse updateResponse = awaitUpdateConnectorName(updateNameDescriptionRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updateNameDescriptionRequest.getName(), equalTo(indexedConnector.getName())); assertThat(updateNameDescriptionRequest.getDescription(), equalTo(indexedConnector.getDescription())); } @@ -260,11 +278,11 @@ public void onFailure(Exception e) { return resp.get(); } - private DocWriteResponse awaitPutConnector(Connector connector) throws Exception { + private DocWriteResponse awaitPutConnector(String docId, Connector connector) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.putConnector(connector, new ActionListener<>() { + connectorIndexService.putConnector(docId, connector, new ActionListener<>() { @Override public void onResponse(DocWriteResponse indexResponse) { resp.set(indexResponse); @@ -285,6 +303,31 @@ public void onFailure(Exception e) { return resp.get(); } + private PostConnectorAction.Response awaitPostConnector(Connector connector) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.postConnector(connector, new ActionListener<>() { + @Override + public void onResponse(PostConnectorAction.Response indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for post connector request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received 
null response from post connector request", resp.get()); + return resp.get(); + } + private Connector awaitGetConnector(String connectorId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 3488c7d9c8ba7..200b14109059b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDependency; import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDisplayType; @@ -52,6 +53,17 @@ public static PutConnectorAction.Request getRandomPutConnectorActionRequest() { ); } + public static PostConnectorAction.Request getRandomPostConnectorActionRequest() { + return new PostConnectorAction.Request( + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomBoolean()), + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomAlphaOfLengthBetween(5, 15)) + ); + } + public static ConnectorScheduling getRandomConnectorScheduling() { return new ConnectorScheduling.Builder().setAccessControl( new ConnectorScheduling.ScheduleConfig.Builder().setEnabled(randomBoolean()).setInterval(getRandomCronExpression()).build() @@ -226,8 +238,8 @@ public static Map 
getRandomConnectorConfiguratio } public static Connector getRandomConnector() { - return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) - .setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) + + return new Connector.Builder().setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setConfiguration(getRandomConnectorConfiguration()) .setCustomScheduling(Map.of(randomAlphaOfLengthBetween(5, 10), getRandomConnectorCustomSchedule())) .setDescription(randomFrom(new String[] { null, randomAlphaOfLength(10) })) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index 481f50bb41711..c08cd37218aeb 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -47,10 +47,10 @@ public final void testRandomSerialization() throws IOException { } public void testToXContent() throws IOException { + String connectorId = "test-connector"; String content = XContentHelper.stripWhitespace(""" { "api_key_id":"test", - "connector_id":"test-connector", "custom_scheduling":{ "schedule-key":{ "configuration_overrides":{ @@ -206,12 +206,12 @@ public void testToXContent() throws IOException { "sync_now":false }"""); - Connector connector = Connector.fromXContentBytes(new BytesArray(content), XContentType.JSON); + Connector connector = Connector.fromXContentBytes(new BytesArray(content), connectorId, XContentType.JSON); boolean humanReadable = true; BytesReference originalBytes = toShuffledXContent(connector, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); Connector parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { - parsed = 
Connector.fromXContent(parser); + parsed = Connector.fromXContent(parser, connectorId); } assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java index bcb1bcc86402f..168e9ec8f433e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java @@ -38,7 +38,7 @@ protected GetConnectorAction.Response mutateInstance(GetConnectorAction.Response @Override protected GetConnectorAction.Response doParseInstance(XContentParser parser) throws IOException { - return GetConnectorAction.Response.fromXContent(parser); + return GetConnectorAction.Response.fromXContent(parser, connector.getConnectorId()); } @Override diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..0587ef7da8654 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorAction.Request::new; + } + + @Override + protected PostConnectorAction.Request createTestInstance() { + return ConnectorTestUtils.getRandomPostConnectorActionRequest(); + } + + @Override + protected PostConnectorAction.Request mutateInstance(PostConnectorAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorAction.Request doParseInstance(XContentParser parser) throws IOException { + return PostConnectorAction.Request.fromXContent(parser); + } + + @Override + protected PostConnectorAction.Request mutateInstanceForVersion(PostConnectorAction.Request instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..fbce905cb4771 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorAction.Response::new; + } + + @Override + protected PostConnectorAction.Response createTestInstance() { + return new PostConnectorAction.Response(randomUUID()); + } + + @Override + protected PostConnectorAction.Response mutateInstance(PostConnectorAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorAction.Response mutateInstanceForVersion(PostConnectorAction.Response instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index ace1138b8e987..49a3f0c4ad043 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -50,7 +50,7 @@ public void testFromXContent_WithAllFields_AllSet() throws IOException { "canceled_at": "2023-12-01T14:19:39.394194Z", "completed_at": "2023-12-01T14:19:39.394194Z", "connector": { - 
"connector_id": "connector-id", + "id": "connector-id", "filtering": [ { "active": { @@ -162,7 +162,7 @@ public void testFromXContent_WithAllNonOptionalFieldsSet_DoesNotThrow() throws I String content = XContentHelper.stripWhitespace(""" { "connector": { - "connector_id": "connector-id", + "id": "connector-id", "filtering": [ { "active": { diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index a432f28f71e54..ccefd8ab6bdb7 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -126,6 +126,7 @@ public class Constants { "cluster:admin/xpack/connector/delete", "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", + "cluster:admin/xpack/connector/post", "cluster:admin/xpack/connector/put", "cluster:admin/xpack/connector/update_configuration", "cluster:admin/xpack/connector/update_error", From 0b9487b269a68c404c7030094b9ac83af065b283 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 5 Dec 2023 14:15:03 +0000 Subject: [PATCH 164/181] Rethrow errors in S3HttpFixture (#102976) This fixture runs in a context that catches `Throwable` and quietly turns all errors into a `500 Internal Server Error`, which may not lead to a test failure. This commit rethrows any errors on another thread to ensure they lead to test failures. 
--- .../src/main/java/fixture/s3/S3HttpFixture.java | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java index 539905b4a815f..5bca7523db4a4 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java @@ -11,6 +11,7 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.rest.RestStatus; import org.junit.rules.ExternalResource; @@ -48,12 +49,18 @@ protected HttpHandler createHandler() { return new S3HttpHandler(bucket, basePath) { @Override public void handle(final HttpExchange exchange) throws IOException { - final String authorization = exchange.getRequestHeaders().getFirst("Authorization"); - if (authorization == null || authorization.contains(accessKey) == false) { - sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Bad access key"); - return; + try { + final String authorization = exchange.getRequestHeaders().getFirst("Authorization"); + if (authorization == null || authorization.contains(accessKey) == false) { + sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Bad access key"); + return; + } + super.handle(exchange); + } catch (Error e) { + // HttpServer catches Throwable, so we must throw errors on another thread + ExceptionsHelper.maybeDieOnAnotherThread(e); + throw e; } - super.handle(exchange); } }; } From 9f9e60ad3f78935fc5686c5116830f6d873a2066 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 5 Dec 2023 17:22:34 +0100 Subject: [PATCH 165/181] Add RefCountAwareThreadedActionListener and use it for transport messages (#102989) The current ThreadedActionListener is not compatible with ref-counted response types. 
I created a version of it that correclty handles ref-couting and is otherwise a drop-in replacement for the ThreadedActionListener. Made use of this new listener in all spots where a ref-counted value is used (obviously all noop for now, but soon at least the search response won't be). part of #102030 --- .../TransportSnapshotsStatusAction.java | 4 +- .../AbstractThreadedActionListener.java | 76 +++++++++++++++++++ .../RefCountAwareThreadedActionListener.java | 52 +++++++++++++ .../support/ThreadedActionListener.java | 57 +------------- .../elasticsearch/indices/IndicesService.java | 4 +- .../retention/ExpiredForecastsRemover.java | 4 +- .../TransportGetStackTracesAction.java | 4 +- 7 files changed, 138 insertions(+), 63 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java create mode 100644 server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index f8b9a9571ddd2..e1f1636781a08 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -12,7 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; @@ -144,7 +144,7 @@ 
protected void masterOperation( new TransportNodesSnapshotsStatus.Request(nodesIds.toArray(Strings.EMPTY_ARRAY)).snapshots(snapshots) .timeout(request.masterNodeTimeout()), // fork to snapshot meta since building the response is expensive for large snapshots - new ThreadedActionListener<>( + new RefCountAwareThreadedActionListener<>( threadPool.executor(ThreadPool.Names.SNAPSHOT_META), listener.delegateFailureAndWrap( (l, nodeSnapshotStatuses) -> buildResponse( diff --git a/server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java b/server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java new file mode 100644 index 0000000000000..54f01abc63833 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; + +import java.util.concurrent.Executor; + +/** + * Base class for action listeners that wrap another action listener and dispatch its completion to an executor. 
+ */ +public abstract class AbstractThreadedActionListener implements ActionListener { + + private static final Logger logger = LogManager.getLogger(AbstractThreadedActionListener.class); + + protected final Executor executor; + protected final ActionListener delegate; + protected final boolean forceExecution; + + protected AbstractThreadedActionListener(Executor executor, boolean forceExecution, ActionListener delegate) { + this.forceExecution = forceExecution; + this.executor = executor; + this.delegate = delegate; + } + + @Override + public final void onFailure(final Exception e) { + executor.execute(new AbstractRunnable() { + @Override + public boolean isForceExecution() { + return forceExecution; + } + + @Override + protected void doRun() { + delegate.onFailure(e); + } + + @Override + public void onRejection(Exception rejectionException) { + rejectionException.addSuppressed(e); + try { + delegate.onFailure(rejectionException); + } catch (Exception doubleFailure) { + rejectionException.addSuppressed(doubleFailure); + onFailure(rejectionException); + } + } + + @Override + public void onFailure(Exception e) { + logger.error(() -> "failed to execute failure callback on [" + AbstractThreadedActionListener.this + "]", e); + assert false : e; + } + + @Override + public String toString() { + return AbstractThreadedActionListener.this + "/onFailure"; + } + }); + } + + @Override + public final String toString() { + return getClass().getSimpleName() + "[" + executor + "/" + delegate + "]"; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java b/server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java new file mode 100644 index 0000000000000..3b13f37d238a0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.core.RefCounted; + +import java.util.concurrent.Executor; + +/** + * Same as {@link ThreadedActionListener} but for {@link RefCounted} types. Makes sure to increment ref-count by one before forking + * to another thread and decrementing after the forked task completes. + */ +public final class RefCountAwareThreadedActionListener extends AbstractThreadedActionListener { + + public RefCountAwareThreadedActionListener(Executor executor, ActionListener delegate) { + super(executor, false, delegate); + } + + @Override + public void onResponse(final Response response) { + response.mustIncRef(); + executor.execute(new ActionRunnable<>(delegate) { + @Override + public boolean isForceExecution() { + return forceExecution; + } + + @Override + protected void doRun() { + listener.onResponse(response); + } + + @Override + public String toString() { + return RefCountAwareThreadedActionListener.this + "/onResponse"; + } + + @Override + public void onAfter() { + response.decRef(); + } + }); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java b/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java index 1a505bdf620ed..5c13d8f93746e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java @@ -8,33 +8,22 @@ package org.elasticsearch.action.support; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; 
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import java.util.concurrent.Executor; /** * An action listener that wraps another action listener and dispatches its completion to an executor. */ -public final class ThreadedActionListener implements ActionListener { - - private static final Logger logger = LogManager.getLogger(ThreadedActionListener.class); - - private final Executor executor; - private final ActionListener delegate; - private final boolean forceExecution; +public final class ThreadedActionListener extends AbstractThreadedActionListener { public ThreadedActionListener(Executor executor, ActionListener delegate) { this(executor, false, delegate); } public ThreadedActionListener(Executor executor, boolean forceExecution, ActionListener delegate) { - this.forceExecution = forceExecution; - this.executor = executor; - this.delegate = delegate; + super(executor, forceExecution, delegate); } @Override @@ -56,46 +45,4 @@ public String toString() { } }); } - - @Override - public void onFailure(final Exception e) { - executor.execute(new AbstractRunnable() { - @Override - public boolean isForceExecution() { - return forceExecution; - } - - @Override - protected void doRun() { - delegate.onFailure(e); - } - - @Override - public void onRejection(Exception rejectionException) { - rejectionException.addSuppressed(e); - try { - delegate.onFailure(rejectionException); - } catch (Exception doubleFailure) { - rejectionException.addSuppressed(doubleFailure); - onFailure(rejectionException); - } - } - - @Override - public void onFailure(Exception e) { - logger.error(() -> "failed to execute failure callback on [" + ThreadedActionListener.this + "]", e); - assert false : e; - } - - @Override - public String toString() { - return ThreadedActionListener.this + "/onFailure"; - } - }); - } - - @Override - public String toString() { - return 
"ThreadedActionListener[" + executor + "/" + delegate + "]"; - } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 0faa66a9d21da..dbbf2bb98212a 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; @@ -890,7 +890,7 @@ public void createShard( .source(mapping.source().string(), XContentType.JSON) .timeout(TimeValue.MAX_VALUE) .masterNodeTimeout(TimeValue.MAX_VALUE), - new ThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null)) + new RefCountAwareThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null)) ); }, this, clusterStateVersion); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 424668a20bf05..677e71b304cb9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -14,7 +14,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; -import 
org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -110,7 +110,7 @@ public void remove(float requestsPerSec, ActionListener listener, Boole client.execute( TransportSearchAction.TYPE, searchRequest, - new ThreadedActionListener<>(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), forecastStatsHandler) + new RefCountAwareThreadedActionListener<>(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), forecastStatsHandler) ); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 801ed012de0ee..a51d8b509003a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; @@ -764,7 +764,7 @@ private void mget(Client client, List indices, List slice, Action client.prepareMultiGet() .addIds(index.getName(), slice) .setRealtime(realtime) - .execute(new ThreadedActionListener<>(responseExecutor, listener)); + .execute(new 
RefCountAwareThreadedActionListener<>(responseExecutor, listener)); } } From a71e4c51fdc9a93fceb3271a68c6be5075732d4a Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 5 Dec 2023 16:29:31 +0000 Subject: [PATCH 166/181] Measure embedding size and store in model config (#102939) When OpenAI or HF text embedding model is PUT, evaluate the model, measure the embedding size and add to the service_settings. For the OpenAI models the similarity measure is known for HF models it is unknown. 2 new fields are added to the `service_settings`: ``` "service_settings": { "similarity": "dot_product", "dimensions": 1536 }, ``` ### Example: Create an OpenAI text embedding model ``` PUT _inference/text_embedding/openai_embeddings { "service": "openai", "service_settings": { "api_key": "${API_KEY}" }, "task_settings": { "model": "text-embedding-ada-002" } } # Responds with { "model_id": "openai_embeddings", "task_type": "text_embedding", "service": "openai", "service_settings": { "similarity": "dot_product", "dimensions": 1536 }, "task_settings": { "model": "text-embedding-ada-002" } } ``` --- .../org/elasticsearch/TransportVersions.java | 1 + .../inference/InferenceService.java | 11 ++ .../TransportPutInferenceModelAction.java | 16 ++- .../inference/common/SimilarityMeasure.java | 24 ++++ .../inference/services/ServiceFields.java | 23 ++++ .../inference/services/ServiceUtils.java | 60 +++++++++ .../huggingface/HuggingFaceService.java | 30 +++++ .../HuggingFaceServiceSettings.java | 115 +++++++++++++++--- .../HuggingFaceElserServiceSettings.java | 9 +- .../HuggingFaceEmbeddingsModel.java | 4 + .../services/openai/OpenAiService.java | 37 ++++++ .../openai/OpenAiServiceSettings.java | 115 ++++++++++++++++-- .../embeddings/OpenAiEmbeddingsModel.java | 14 +++ .../HuggingFaceServiceSettingsTests.java | 76 +++++++++--- .../openai/OpenAiServiceSettingsTests.java | 68 ++++++++--- .../OpenAiEmbeddingsModelTests.java | 3 +- 16 files changed, 541 insertions(+), 65 deletions(-) create mode 
100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 369e1da237aa0..0b899a863e197 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -189,6 +189,7 @@ static TransportVersion def(int id) { public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); + public static final TransportVersion INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED = def(8_559_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 2f83310ea2388..b6ae21977e4bc 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -87,6 +87,17 @@ default void init(Client client) {} */ void start(Model model, ActionListener listener); + /** + * Optionally test the new model configuration in the inference service. + * This function should be called when the model is first created, the + * default action is to do nothing. 
+ * @param model The new model + * @param listener The listener + */ + default void checkModelConfig(Model model, ActionListener listener) { + listener.onResponse(model); + }; + /** * Return true if this model is hosted in the local Elasticsearch cluster * @return True if in cluster diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index f6bb90d701a4a..de561846a7a68 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -162,8 +162,20 @@ private void parseAndStoreModel( ActionListener listener ) { var model = service.parseRequestConfig(modelId, taskType, config, platformArchitectures); - // model is valid good to persist then start - this.modelRegistry.storeModel(model, ActionListener.wrap(r -> { startModel(service, model, listener); }, listener::onFailure)); + + service.checkModelConfig( + model, + ActionListener.wrap( + // model is valid good to persist then start + verifiedModel -> { + modelRegistry.storeModel( + verifiedModel, + ActionListener.wrap(r -> { startModel(service, verifiedModel, listener); }, listener::onFailure) + ); + }, + listener::onFailure + ) + ); } private static void startModel(InferenceService service, Model model, ActionListener listener) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java new file mode 100644 index 0000000000000..3028ecd078597 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java @@ -0,0 +1,24 @@ 
+/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import java.util.Locale; + +public enum SimilarityMeasure { + COSINE, + DOT_PRODUCT; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static SimilarityMeasure fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java new file mode 100644 index 0000000000000..80e6e4a6124ec --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +/** + * Common strings and definitions shared by service implementations + */ +public final class ServiceFields { + + public static final String SIMILARITY = "similarity"; + public static final String DIMENSIONS = "dimensions"; + public static final String MAX_INPUT_TOKENS = "max_input_tokens"; + public static final String URL = "url"; + + private ServiceFields() { + + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 597cd172ff661..6689229b35da2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -8,18 +8,25 @@ package org.elasticsearch.xpack.inference.services; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Strings; +import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import java.net.URI; import java.net.URISyntaxException; +import java.util.List; import java.util.Map; import java.util.Objects; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; public class ServiceUtils { /** @@ -133,6 +140,20 @@ public static SecureString extractRequiredSecureString( return new 
SecureString(Objects.requireNonNull(requiredField).toCharArray()); } + public static SimilarityMeasure extractSimilarity(Map map, String scope, ValidationException validationException) { + String similarity = extractOptionalString(map, SIMILARITY, scope, validationException); + + if (similarity != null) { + try { + return SimilarityMeasure.fromString(similarity); + } catch (IllegalArgumentException iae) { + validationException.addValidationError("[" + scope + "] Unknown similarity measure [" + similarity + "]"); + } + } + + return null; + } + public static String extractRequiredString( Map map, String settingName, @@ -187,4 +208,43 @@ public static ElasticsearchStatusException createInvalidModelException(Model mod RestStatus.INTERNAL_SERVER_ERROR ); } + + /** + * Evaluate the model and return the text embedding size + * @param model Should be a text embedding model + * @param service The inference service + * @param listener Size listener + */ + public static void getEmbeddingSize(Model model, InferenceService service, ActionListener listener) { + assert model.getTaskType() == TaskType.TEXT_EMBEDDING; + + service.infer(model, List.of(TEST_EMBEDDING_INPUT), Map.of(), ActionListener.wrap(r -> { + if (r instanceof TextEmbeddingResults embeddingResults) { + if (embeddingResults.embeddings().isEmpty()) { + listener.onFailure( + new ElasticsearchStatusException( + "Could not determine embedding size, no embeddings were returned in test call", + RestStatus.BAD_REQUEST + ) + ); + } else { + listener.onResponse(embeddingResults.embeddings().get(0).values().size()); + } + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Could not determine embedding size. 
" + + "Expected a result of type [" + + TextEmbeddingResults.NAME + + "] got [" + + r.getWriteableName() + + "]", + RestStatus.BAD_REQUEST + ) + ); + } + }, listener::onFailure)); + } + + private static final String TEST_EMBEDDING_INPUT = "how big"; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index 99e39f6f55912..dc189352c8fc4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -11,11 +11,14 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; @@ -43,6 +46,33 @@ protected HuggingFaceModel createModel( }; } + @Override + public void checkModelConfig(Model model, ActionListener listener) { + if (model instanceof HuggingFaceEmbeddingsModel embeddingsModel) { + ServiceUtils.getEmbeddingSize( + model, + this, + ActionListener.wrap( + size -> listener.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)), + listener::onFailure + ) + ); + } else 
{ + listener.onResponse(model); + } + } + + private static HuggingFaceEmbeddingsModel updateModelWithEmbeddingDetails(HuggingFaceEmbeddingsModel model, int embeddingSize) { + var serviceSettings = new HuggingFaceServiceSettings( + model.getServiceSettings().uri(), + null, // Similarity measure is unknown + embeddingSize, + null // max input tokens is unknown + ); + + return new HuggingFaceEmbeddingsModel(model, serviceSettings); + } + @Override public String name() { return NAME; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index dc98990b1ef8c..6464ca0e0fda8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -12,47 +12,74 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import java.io.IOException; import java.net.URI; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public record HuggingFaceServiceSettings(URI uri) implements ServiceSettings { +public class HuggingFaceServiceSettings implements ServiceSettings { public static final String NAME = "hugging_face_service_settings"; - static final String URL = "url"; - public static HuggingFaceServiceSettings fromMap(Map map) { - return new HuggingFaceServiceSettings(extractUri(map, URL)); - } - - public static URI extractUri(Map map, String fieldName) { ValidationException validationException = new ValidationException(); + var uri = extractUri(map, URL, validationException); + + SimilarityMeasure similarityMeasure = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer dims = removeAsType(map, DIMENSIONS, Integer.class); + Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); - String parsedUrl = extractRequiredString(map, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); if (validationException.validationErrors().isEmpty() == false) { throw validationException; } + return new HuggingFaceServiceSettings(uri, similarityMeasure, dims, maxInputTokens); + } - URI uri = convertToUri(parsedUrl, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; + public static URI extractUri(Map map, String fieldName, ValidationException validationException) { + String parsedUrl = extractRequiredString(map, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); + if (parsedUrl 
== null) { + return null; } + return convertToUri(parsedUrl, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); + } - return uri; + private final URI uri; + private final SimilarityMeasure similarity; + private final Integer dimensions; + private final Integer maxInputTokens; + + public HuggingFaceServiceSettings(URI uri) { + this.uri = Objects.requireNonNull(uri); + this.similarity = null; + this.dimensions = null; + this.maxInputTokens = null; } - public HuggingFaceServiceSettings { - Objects.requireNonNull(uri); + public HuggingFaceServiceSettings( + URI uri, + @Nullable SimilarityMeasure similarityMeasure, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens + ) { + this.uri = Objects.requireNonNull(uri); + this.similarity = similarityMeasure; + this.dimensions = dimensions; + this.maxInputTokens = maxInputTokens; } public HuggingFaceServiceSettings(String url) { @@ -60,15 +87,32 @@ public HuggingFaceServiceSettings(String url) { } public HuggingFaceServiceSettings(StreamInput in) throws IOException { - this(in.readString()); + this.uri = createUri(in.readString()); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + similarity = in.readOptionalEnum(SimilarityMeasure.class); + dimensions = in.readOptionalVInt(); + maxInputTokens = in.readOptionalVInt(); + } else { + similarity = null; + dimensions = null; + maxInputTokens = null; + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(URL, uri.toString()); + if (similarity != null) { + builder.field(SIMILARITY, similarity); + } + if (dimensions != null) { + builder.field(DIMENSIONS, dimensions); + } + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } builder.endObject(); - return builder; } @@ -85,5 +129,42 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void 
writeTo(StreamOutput out) throws IOException { out.writeString(uri.toString()); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + out.writeOptionalEnum(similarity); + out.writeOptionalVInt(dimensions); + out.writeOptionalVInt(maxInputTokens); + } + } + + public URI uri() { + return uri; + } + + public SimilarityMeasure similarity() { + return similarity; + } + + public Integer dimensions() { + return dimensions; + } + + public Integer maxInputTokens() { + return maxInputTokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HuggingFaceServiceSettings that = (HuggingFaceServiceSettings) o; + return Objects.equals(uri, that.uri) + && similarity == that.similarity + && Objects.equals(dimensions, that.dimensions) + && Objects.equals(maxInputTokens, that.maxInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(uri, similarity, dimensions, maxInputTokens); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index dd185c4ca8385..777c55b56ff3a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ServiceSettings; @@ -19,6 +20,7 @@ 
import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; @@ -28,7 +30,12 @@ public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSetting static final String URL = "url"; public static HuggingFaceElserServiceSettings fromMap(Map map) { - return new HuggingFaceElserServiceSettings(extractUri(map, URL)); + ValidationException validationException = new ValidationException(); + var uri = extractUri(map, URL, validationException); + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + return new HuggingFaceElserServiceSettings(uri); } public HuggingFaceElserServiceSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java index 1f2e545a06901..ad9f09529de40 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -43,6 +43,10 @@ public HuggingFaceEmbeddingsModel( super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secrets)); } + public HuggingFaceEmbeddingsModel(HuggingFaceEmbeddingsModel model, HuggingFaceServiceSettings serviceSettings) { + this(model.getModelId(), model.getTaskType(), model.getConfigurations().getService(), serviceSettings, model.getSecretSettings()); + } + @Override public HuggingFaceServiceSettings getServiceSettings() { 
return (HuggingFaceServiceSettings) super.getServiceSettings(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index acf7b84bfccb1..8a2f6295b41c8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -19,10 +19,12 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import java.util.List; @@ -134,6 +136,41 @@ public void doInfer( action.execute(input, listener); } + /** + * For text embedding models get the embedding size and + * update the service settings. 
+ * + * @param model The new model + * @param listener The listener + */ + @Override + public void checkModelConfig(Model model, ActionListener listener) { + if (model instanceof OpenAiEmbeddingsModel embeddingsModel) { + ServiceUtils.getEmbeddingSize( + model, + this, + ActionListener.wrap( + size -> listener.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)), + listener::onFailure + ) + ); + } else { + listener.onResponse(model); + } + } + + private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsModel model, int embeddingSize) { + OpenAiServiceSettings serviceSettings = new OpenAiServiceSettings( + model.getServiceSettings().uri(), + model.getServiceSettings().organizationId(), + SimilarityMeasure.DOT_PRODUCT, + embeddingSize, + null + ); + + return new OpenAiEmbeddingsModel(model, serviceSettings); + } + @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_OPENAI_ADDED; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java index 6c7ff17e352d5..5ade2aad0acb4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java @@ -16,24 +16,30 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import java.io.IOException; import java.net.URI; import java.util.Map; +import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; +import static 
org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; /** * Defines the base settings for interacting with OpenAI. - * @param uri an optional uri to override the openai url. This should only be used for testing. */ -public record OpenAiServiceSettings(@Nullable URI uri, @Nullable String organizationId) implements ServiceSettings { +public class OpenAiServiceSettings implements ServiceSettings { public static final String NAME = "openai_service_settings"; - public static final String URL = "url"; public static final String ORGANIZATION = "organization_id"; public static OpenAiServiceSettings fromMap(Map map) { @@ -41,15 +47,18 @@ public static OpenAiServiceSettings fromMap(Map map) { String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); String organizationId = extractOptionalString(map, ORGANIZATION, ModelConfigurations.SERVICE_SETTINGS, validationException); + SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer dims = removeAsType(map, DIMENSIONS, Integer.class); + Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); - // Throw if any of the settings were empty strings + // Throw if any of the settings were empty strings or invalid if (validationException.validationErrors().isEmpty() == false) { throw 
validationException; } // the url is optional and only for testing if (url == null) { - return new OpenAiServiceSettings((URI) null, organizationId); + return new OpenAiServiceSettings((URI) null, organizationId, similarity, dims, maxInputTokens); } URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); @@ -58,11 +67,37 @@ public static OpenAiServiceSettings fromMap(Map map) { throw validationException; } - return new OpenAiServiceSettings(uri, organizationId); + return new OpenAiServiceSettings(uri, organizationId, similarity, dims, maxInputTokens); } - public OpenAiServiceSettings(@Nullable String url, @Nullable String organizationId) { - this(createOptionalUri(url), organizationId); + private final URI uri; + private final String organizationId; + private final SimilarityMeasure similarity; + private final Integer dimensions; + private final Integer maxInputTokens; + + public OpenAiServiceSettings( + @Nullable URI uri, + @Nullable String organizationId, + @Nullable SimilarityMeasure similarity, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens + ) { + this.uri = uri; + this.organizationId = organizationId; + this.similarity = similarity; + this.dimensions = dimensions; + this.maxInputTokens = maxInputTokens; + } + + public OpenAiServiceSettings( + @Nullable String uri, + @Nullable String organizationId, + @Nullable SimilarityMeasure similarity, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens + ) { + this(createOptionalUri(uri), organizationId, similarity, dimensions, maxInputTokens); } private static URI createOptionalUri(String url) { @@ -74,7 +109,37 @@ private static URI createOptionalUri(String url) { } public OpenAiServiceSettings(StreamInput in) throws IOException { - this(in.readOptionalString(), in.readOptionalString()); + uri = createOptionalUri(in.readOptionalString()); + organizationId = in.readOptionalString(); + if 
(in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + similarity = in.readOptionalEnum(SimilarityMeasure.class); + dimensions = in.readOptionalVInt(); + maxInputTokens = in.readOptionalVInt(); + } else { + similarity = null; + dimensions = null; + maxInputTokens = null; + } + } + + public URI uri() { + return uri; + } + + public String organizationId() { + return organizationId; + } + + public SimilarityMeasure similarity() { + return similarity; + } + + public Integer dimensions() { + return dimensions; + } + + public Integer maxInputTokens() { + return maxInputTokens; } @Override @@ -89,10 +154,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (uri != null) { builder.field(URL, uri.toString()); } - if (organizationId != null) { builder.field(ORGANIZATION, organizationId); } + if (similarity != null) { + builder.field(SIMILARITY, similarity); + } + if (dimensions != null) { + builder.field(DIMENSIONS, dimensions); + } + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } builder.endObject(); return builder; @@ -108,5 +181,27 @@ public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? 
uri.toString() : null; out.writeOptionalString(uriToWrite); out.writeOptionalString(organizationId); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + out.writeOptionalEnum(similarity); + out.writeOptionalVInt(dimensions); + out.writeOptionalVInt(maxInputTokens); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OpenAiServiceSettings that = (OpenAiServiceSettings) o; + return Objects.equals(uri, that.uri) + && Objects.equals(organizationId, that.organizationId) + && Objects.equals(similarity, that.similarity) + && Objects.equals(dimensions, that.dimensions) + && Objects.equals(maxInputTokens, that.maxInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(uri, organizationId, similarity, dimensions, maxInputTokens); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 02c1e41e0374a..250837d895590 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -20,6 +20,7 @@ import java.util.Map; public class OpenAiEmbeddingsModel extends OpenAiModel { + public OpenAiEmbeddingsModel( String modelId, TaskType taskType, @@ -63,6 +64,19 @@ private OpenAiEmbeddingsModel(OpenAiEmbeddingsModel originalModel, OpenAiEmbeddi ); } + public OpenAiEmbeddingsModel(OpenAiEmbeddingsModel originalModel, OpenAiServiceSettings serviceSettings) { + super( + new ModelConfigurations( + originalModel.getConfigurations().getModelId(), + originalModel.getConfigurations().getTaskType(), + 
originalModel.getConfigurations().getService(), + serviceSettings, + originalModel.getTaskSettings() + ), + new ModelSecrets(originalModel.getSecretSettings()) + ); + } + @Override public OpenAiServiceSettings getServiceSettings() { return (OpenAiServiceSettings) super.getServiceSettings(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index 001d869f67a5c..7e2a333685321 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -11,6 +11,9 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.HashMap; @@ -22,14 +25,47 @@ public class HuggingFaceServiceSettingsTests extends AbstractWireSerializingTestCase { public static HuggingFaceServiceSettings createRandom() { - return new HuggingFaceServiceSettings(randomAlphaOfLength(15)); + return createRandom(randomAlphaOfLength(15)); + } + + private static HuggingFaceServiceSettings createRandom(String url) { + SimilarityMeasure similarityMeasure = null; + Integer dims = null; + var isTextEmbeddingModel = randomBoolean(); + if (isTextEmbeddingModel) { + similarityMeasure = randomFrom(SimilarityMeasure.values()); + dims = randomIntBetween(32, 256); + } + Integer maxInputTokens = randomBoolean() ? 
null : randomIntBetween(128, 256); + return new HuggingFaceServiceSettings(ServiceUtils.createUri(url), similarityMeasure, dims, maxInputTokens); } public void testFromMap() { var url = "https://www.abc.com"; - var serviceSettings = HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))); - - assertThat(serviceSettings, is(new HuggingFaceServiceSettings(url))); + var similarity = SimilarityMeasure.DOT_PRODUCT; + var dims = 384; + var maxInputTokens = 128; + { + var serviceSettings = HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))); + assertThat(serviceSettings, is(new HuggingFaceServiceSettings(url))); + } + { + var serviceSettings = HuggingFaceServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity.toString(), + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) + ); + assertThat(serviceSettings, is(new HuggingFaceServiceSettings(ServiceUtils.createUri(url), similarity, dims, maxInputTokens))); + } } public void testFromMap_MissingUrl_ThrowsError() { @@ -38,10 +74,7 @@ public void testFromMap_MissingUrl_ThrowsError() { assertThat( thrownException.getMessage(), containsString( - Strings.format( - "Validation Failed: 1: [service_settings] does not contain the required setting [%s];", - HuggingFaceServiceSettings.URL - ) + Strings.format("Validation Failed: 1: [service_settings] does not contain the required setting [%s];", ServiceFields.URL) ) ); } @@ -49,7 +82,7 @@ public void testFromMap_MissingUrl_ThrowsError() { public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, ""))) + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, ""))) ); assertThat( @@ -57,7 +90,7 @@ public void 
testFromMap_EmptyUrl_ThrowsError() { containsString( Strings.format( "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", - HuggingFaceServiceSettings.URL + ServiceFields.URL ) ) ); @@ -67,19 +100,24 @@ public void testFromMap_InvalidUrl_ThrowsError() { var url = "https://www.abc^.com"; var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))) + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))) ); assertThat( thrownException.getMessage(), - is( - Strings.format( - "Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", - url, - HuggingFaceServiceSettings.URL - ) - ) + is(Strings.format("Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", url, ServiceFields.URL)) + ); + } + + public void testFromMap_InvalidSimilarity_ThrowsError() { + var url = "https://www.abc.com"; + var similarity = "by_size"; + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.SIMILARITY, similarity))) ); + + assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); } @Override @@ -100,7 +138,7 @@ protected HuggingFaceServiceSettings mutateInstance(HuggingFaceServiceSettings i public static Map getServiceSettingsMap(String url) { var map = new HashMap(); - map.put(HuggingFaceServiceSettings.URL, url); + map.put(ServiceFields.URL, url); return map; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java index 9e20286c1d0ff..81bbb4b041c51 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java @@ -12,6 +12,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.HashMap; @@ -23,7 +26,7 @@ public class OpenAiServiceSettingsTests extends AbstractWireSerializingTestCase { public static OpenAiServiceSettings createRandomWithNonNullUrl() { - return new OpenAiServiceSettings(randomAlphaOfLength(15), randomAlphaOfLength(15)); + return createRandom(randomAlphaOfLength(15)); } /** @@ -31,18 +34,49 @@ public static OpenAiServiceSettings createRandomWithNonNullUrl() { */ public static OpenAiServiceSettings createRandom() { var url = randomBoolean() ? randomAlphaOfLength(15) : null; + return createRandom(url); + } + + private static OpenAiServiceSettings createRandom(String url) { var organizationId = randomBoolean() ? randomAlphaOfLength(15) : null; - return new OpenAiServiceSettings(url, organizationId); + SimilarityMeasure similarityMeasure = null; + Integer dims = null; + var isTextEmbeddingModel = randomBoolean(); + if (isTextEmbeddingModel) { + similarityMeasure = SimilarityMeasure.DOT_PRODUCT; + dims = 1536; + } + Integer maxInputTokens = randomBoolean() ? 
null : randomIntBetween(128, 256); + return new OpenAiServiceSettings(ServiceUtils.createUri(url), organizationId, similarityMeasure, dims, maxInputTokens); } public void testFromMap() { var url = "https://www.abc.com"; var org = "organization"; + var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; var serviceSettings = OpenAiServiceSettings.fromMap( - new HashMap<>(Map.of(OpenAiServiceSettings.URL, url, OpenAiServiceSettings.ORGANIZATION, org)) + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + OpenAiServiceSettings.ORGANIZATION, + org, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) ); - assertThat(serviceSettings, is(new OpenAiServiceSettings(url, org))); + assertThat( + serviceSettings, + is(new OpenAiServiceSettings(ServiceUtils.createUri(url), org, SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens)) + ); } public void testFromMap_MissingUrl_DoesNotThrowException() { @@ -54,7 +88,7 @@ public void testFromMap_MissingUrl_DoesNotThrowException() { public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceSettings.URL, ""))) + () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, ""))) ); assertThat( @@ -62,7 +96,7 @@ public void testFromMap_EmptyUrl_ThrowsError() { containsString( Strings.format( "Validation Failed: 1: [service_settings] Invalid value empty string. 
[%s] must be a non-empty string;", - OpenAiServiceSettings.URL + ServiceFields.URL ) ) ); @@ -95,21 +129,25 @@ public void testFromMap_InvalidUrl_ThrowsError() { var url = "https://www.abc^.com"; var thrownException = expectThrows( ValidationException.class, - () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceSettings.URL, url))) + () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))) ); assertThat( thrownException.getMessage(), - is( - Strings.format( - "Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", - url, - OpenAiServiceSettings.URL - ) - ) + is(Strings.format("Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", url, ServiceFields.URL)) ); } + public void testFromMap_InvalidSimilarity_ThrowsError() { + var similarity = "by_size"; + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.SIMILARITY, similarity))) + ); + + assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); + } + @Override protected Writeable.Reader instanceReader() { return OpenAiServiceSettings::new; @@ -129,7 +167,7 @@ public static Map getServiceSettingsMap(@Nullable String url, @N var map = new HashMap(); if (url != null) { - map.put(OpenAiServiceSettings.URL, url); + map.put(ServiceFields.URL, url); } if (org != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index 62cb609a59d2a..302aacdc30606 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -58,7 +59,7 @@ public static OpenAiEmbeddingsModel createModel( "id", TaskType.TEXT_EMBEDDING, "service", - new OpenAiServiceSettings(url, org), + new OpenAiServiceSettings(url, org, SimilarityMeasure.DOT_PRODUCT, 1536, null), new OpenAiEmbeddingsTaskSettings(modelName, user), new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); From b7344575df3d4cb13df720308821fc6d1e057891 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 5 Dec 2023 08:35:32 -0800 Subject: [PATCH 167/181] Introduce local block factory (#102901) Requesting and returning memory from a CircuitBreaker can be costly due to the involvement of read/write on one or several atomic longs. To address this issue, the local breaker adopts a strategy of over-requesting memory, utilizing the reserved amount for subsequent memory requests without direct access to the actual breaker. Before passing a Block to another Driver, it is necessary to switch the owning block factory to its parent, which is associated with the global breaker. This is done to bypass the local breaker when releasing memory, as the releasing thread can be any thread, not necessarily the one executing the Driver. There are two specific operators that need to change the owning block factory: SinkOperator (superset of ExchangeSinkOperator), which is the last operator of a Driver, and AsyncOperator, which can be responded by any thread in response. 
The optimization reduces the latency of the enrich operation in the nyc_taxis benchmark from 100ms to 50ms. When combined with #102902, it further reduces the latency to below 40ms, better than the previous performance before the regression. Relates #102625 --- docs/changelog/102901.yaml | 5 + .../compute/data/BooleanArrayBlock.java | 6 +- .../compute/data/BooleanArrayVector.java | 2 +- .../compute/data/BooleanBigArrayVector.java | 1 + .../compute/data/BooleanVectorBlock.java | 5 + .../compute/data/BytesRefArrayBlock.java | 6 +- .../compute/data/BytesRefArrayVector.java | 4 +- .../compute/data/BytesRefVectorBlock.java | 5 + .../compute/data/ConstantBooleanVector.java | 2 +- .../compute/data/ConstantBytesRefVector.java | 2 +- .../compute/data/ConstantDoubleVector.java | 2 +- .../compute/data/ConstantIntVector.java | 2 +- .../compute/data/ConstantLongVector.java | 2 +- .../compute/data/DoubleArrayBlock.java | 6 +- .../compute/data/DoubleArrayVector.java | 2 +- .../compute/data/DoubleBigArrayVector.java | 1 + .../compute/data/DoubleVectorBlock.java | 5 + .../compute/data/IntArrayBlock.java | 6 +- .../compute/data/IntArrayVector.java | 2 +- .../compute/data/IntBigArrayVector.java | 1 + .../compute/data/IntVectorBlock.java | 5 + .../compute/data/LongArrayBlock.java | 6 +- .../compute/data/LongArrayVector.java | 2 +- .../compute/data/LongBigArrayVector.java | 1 + .../compute/data/LongVectorBlock.java | 5 + .../compute/data/AbstractBlock.java | 7 +- .../compute/data/AbstractVector.java | 7 +- .../org/elasticsearch/compute/data/Block.java | 11 ++ .../compute/data/BlockFactory.java | 23 +++ .../compute/data/ConstantNullBlock.java | 4 +- .../elasticsearch/compute/data/DocBlock.java | 5 + .../elasticsearch/compute/data/DocVector.java | 7 + .../compute/data/LocalCircuitBreaker.java | 137 ++++++++++++++++++ .../org/elasticsearch/compute/data/Page.java | 12 +- .../elasticsearch/compute/data/Vector.java | 9 ++ .../compute/data/X-ArrayBlock.java.st | 8 +- 
.../compute/data/X-ArrayVector.java.st | 4 +- .../compute/data/X-BigArrayVector.java.st | 1 + .../compute/data/X-ConstantVector.java.st | 2 +- .../compute/data/X-VectorBlock.java.st | 5 + .../compute/operator/AsyncOperator.java | 10 +- .../compute/operator/OutputOperator.java | 2 +- .../operator/PageConsumerOperator.java | 2 +- .../compute/operator/SinkOperator.java | 9 ++ .../exchange/ExchangeSinkOperator.java | 5 +- .../compute/data/BlockFactoryTests.java | 80 ++++++++++ .../data/LocalCircuitBreakerTests.java | 121 ++++++++++++++++ .../compute/data/MockBlockFactory.java | 14 +- .../compute/operator/AsyncOperatorTests.java | 38 ++++- .../exchange/ExchangeServiceTests.java | 2 +- .../action/AbstractEsqlIntegTestCase.java | 11 ++ .../xpack/esql/action/EnrichIT.java | 3 + .../esql/action/EsqlActionBreakerIT.java | 3 + .../esql/enrich/EnrichLookupService.java | 31 +++- .../esql/planner/LocalExecutionPlanner.java | 46 ++++-- .../xpack/esql/plugin/ComputeService.java | 5 + .../esql/plugin/TransportEsqlQueryAction.java | 1 + .../elasticsearch/xpack/esql/CsvTests.java | 12 ++ .../planner/LocalExecutionPlannerTests.java | 1 + 59 files changed, 651 insertions(+), 73 deletions(-) create mode 100644 docs/changelog/102901.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java diff --git a/docs/changelog/102901.yaml b/docs/changelog/102901.yaml new file mode 100644 index 0000000000000..ac417691b525c --- /dev/null +++ b/docs/changelog/102901.yaml @@ -0,0 +1,5 @@ +pr: 102901 +summary: Introduce local block factory +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index 
844a8bc1b7290..7c2723163197a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -50,7 +50,7 @@ public boolean getBoolean(int valueIndex) { @Override public BooleanBlock filter(int... positions) { - try (var builder = blockFactory.newBooleanBlockBuilder(positions.length)) { + try (var builder = blockFactory().newBooleanBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public BooleanBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 8ad4196c57997..5aa8724eb0ca2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public BooleanVector filter(int... 
positions) { - try (BooleanVector.Builder builder = blockFactory.newBooleanVectorBuilder(positions.length)) { + try (BooleanVector.Builder builder = blockFactory().newBooleanVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendBoolean(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 5ad88ab1ac6e9..2621ec612944e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public BooleanVector filter(int... positions) { + var blockFactory = blockFactory(); final BitArray filtered = new BitArray(positions.length, blockFactory.bigArrays()); for (int i = 0; i < positions.length; i++) { if (values.get(positions[i])) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index c5c3a24736c16..19c551d85617f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 2ae412b3867a0..34d4e5aaa43e2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -53,7 +53,7 @@ public BytesRef getBytesRef(int valueIndex, BytesRef dest) { @Override public BytesRefBlock filter(int... positions) { final BytesRef scratch = new BytesRef(); - try (var builder = blockFactory.newBytesRefBlockBuilder(positions.length)) { + try (var builder = blockFactory().newBytesRefBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -88,7 +88,7 @@ public BytesRefBlock expand() { } // TODO use reference counting to share the values final BytesRef scratch = new BytesRef(); - try (var builder = blockFactory.newBytesRefBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newBytesRefBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -141,7 +141,7 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 02ab9a09b15e1..a8bb60f9f20fa 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -57,7 +57,7 @@ public boolean isConstant() { @Override public BytesRefVector filter(int... positions) { final var scratch = new BytesRef(); - try (BytesRefVector.Builder builder = blockFactory.newBytesRefVectorBuilder(positions.length)) { + try (BytesRefVector.Builder builder = blockFactory().newBytesRefVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendBytesRef(values.get(pos, scratch)); } @@ -98,7 +98,7 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index d8c2c615a3dfb..e834a1c171e49 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -84,4 +84,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index e4f6e6f144abe..b636d89a206e0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index f5f6e7945d03b..be34db592b228 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -89,6 +89,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index 05e71f3853155..f6cce49aa3d42 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already 
released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index 3f1eb45843c66..fa7b9223d5107 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 08840c3772a9e..21d4d81dfd193 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index a8dbb5ba1d963..db3546c73c054 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -50,7 +50,7 @@ public double getDouble(int valueIndex) { @Override public DoubleBlock filter(int... positions) { - try (var builder = blockFactory.newDoubleBlockBuilder(positions.length)) { + try (var builder = blockFactory().newDoubleBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public DoubleBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 69cf686a1576a..08e51b0e313d8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public DoubleVector filter(int... 
positions) { - try (DoubleVector.Builder builder = blockFactory.newDoubleVectorBuilder(positions.length)) { + try (DoubleVector.Builder builder = blockFactory().newDoubleVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendDouble(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index d50e8adbbd37d..476b94ad3fa05 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public DoubleVector filter(int... positions) { + var blockFactory = blockFactory(); final DoubleArray filtered = blockFactory.bigArrays().newDoubleArray(positions.length, true); for (int i = 0; i < positions.length; i++) { filtered.set(i, values.get(positions[i])); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index ac4c826b5f2d2..62319e9c100cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index bc41b83eca375..111fc0c757af1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -50,7 +50,7 @@ public int getInt(int valueIndex) { @Override public IntBlock filter(int... positions) { - try (var builder = blockFactory.newIntBlockBuilder(positions.length)) { + try (var builder = blockFactory().newIntBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public IntBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 90766a9a67d81..9c8c27efa0806 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public IntVector filter(int... 
positions) { - try (IntVector.Builder builder = blockFactory.newIntVectorBuilder(positions.length)) { + try (IntVector.Builder builder = blockFactory().newIntVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendInt(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index 3bb9461300ee4..76d2797f2a64b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public IntVector filter(int... positions) { + var blockFactory = blockFactory(); final IntArray filtered = blockFactory.bigArrays().newIntArray(positions.length, true); for (int i = 0; i < positions.length; i++) { filtered.set(i, values.get(positions[i])); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 60280ebb13064..ccc242dd1a573 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index be10a517b7df0..9e0fa9bcc2993 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -50,7 +50,7 @@ public long getLong(int valueIndex) { @Override public LongBlock filter(int... positions) { - try (var builder = blockFactory.newLongBlockBuilder(positions.length)) { + try (var builder = blockFactory().newLongBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public LongBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index b476556ce27fa..0a3ada321d94c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public LongVector filter(int... 
positions) { - try (LongVector.Builder builder = blockFactory.newLongVectorBuilder(positions.length)) { + try (LongVector.Builder builder = blockFactory().newLongVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendLong(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index ccf4a6944b60e..2101b606e9a90 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public LongVector filter(int... positions) { + var blockFactory = blockFactory(); final LongArray filtered = blockFactory.bigArrays().newLongArray(positions.length, true); for (int i = 0; i < positions.length; i++) { filtered.set(i, values.get(positions[i])); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index c9b65ba3e9029..94697b3136fce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java index 39f17cfecab1a..177e3fb6798d1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java @@ -21,7 +21,7 @@ abstract class AbstractBlock implements Block { @Nullable protected final BitSet nullsMask; - protected final BlockFactory blockFactory; + private BlockFactory blockFactory; /** * @param positionCount the number of values in this block @@ -95,6 +95,11 @@ public BlockFactory blockFactory() { return blockFactory; } + @Override + public void allowPassingToDifferentDriver() { + blockFactory = blockFactory.parent(); + } + @Override public boolean isReleased() { return hasReferences() == false; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java index d7bda952bdcd0..33ef14cfb4ad8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java @@ -13,7 +13,7 @@ abstract class AbstractVector implements Vector { private final int positionCount; - protected final BlockFactory blockFactory; + private BlockFactory blockFactory; protected boolean released; protected AbstractVector(int positionCount, BlockFactory blockFactory) { @@ -35,6 +35,11 @@ public BlockFactory blockFactory() { return blockFactory; } + @Override + public void allowPassingToDifferentDriver() { + blockFactory = blockFactory.parent(); + } + @Override public void close() { if (released) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 
1d8c548d90571..964e510de9a20 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -62,8 +62,17 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R ElementType elementType(); /** The block factory associated with this block. */ + // TODO: renaming this to owning blockFactory once we pass blockFactory for filter and expand BlockFactory blockFactory(); + /** + * Before passing a Block to another Driver, it is necessary to switch the owning block factory to its parent, which is associated + * with the global circuit breaker. This ensures that when the new driver releases this Block, it returns memory directly to the + * parent block factory instead of the local block factory of this Block. This is important because the local block factory is + * not thread safe and doesn't support simultaneous access by more than one thread. + */ + void allowPassingToDifferentDriver(); + /** * Tells if this block has been released. A block is released by calling its {@link Block#close()} or {@link Block#decRef()} methods. * @return true iff the block's reference count is zero. @@ -102,6 +111,7 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R * The new block may hold a reference to this block, increasing this block's reference count. * @param positions the positions to retain * @return a filtered block + * TODO: pass BlockFactory */ Block filter(int... positions); @@ -145,6 +155,7 @@ default boolean mvSortedAscending() { /** * Expand multivalued fields into one row per value. Returns the same block if there aren't any multivalued * fields to expand. The returned block needs to be closed by the caller to release the block's resources. 
+ * TODO: pass BlockFactory */ Block expand(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 88616e7fc95fd..092f66a7d4427 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block.MvOrdering; @@ -18,6 +19,11 @@ import java.util.BitSet; public class BlockFactory { + public static final String LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING = "esql.block_factory.local_breaker.over_reserved"; + public static final ByteSizeValue LOCAL_BREAKER_OVER_RESERVED_DEFAULT_SIZE = ByteSizeValue.ofKb(4); + + public static final String LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING = "esql.block_factory.local_breaker.max_over_reserved"; + public static final ByteSizeValue LOCAL_BREAKER_OVER_RESERVED_DEFAULT_MAX_SIZE = ByteSizeValue.ofKb(16); private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( new NoopCircuitBreaker("noop-esql-breaker"), @@ -27,10 +33,16 @@ public class BlockFactory { private final CircuitBreaker breaker; private final BigArrays bigArrays; + private final BlockFactory parent; public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { + this(breaker, bigArrays, null); + } + + protected BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, BlockFactory parent) { this.breaker = breaker; this.bigArrays = bigArrays; + this.parent = parent; } /** @@ -54,6 +66,17 @@ 
public BigArrays bigArrays() { return bigArrays; } + protected BlockFactory parent() { + return parent != null ? parent : this; + } + + public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { + if (childBreaker.parentBreaker() != breaker) { + throw new IllegalStateException("Different parent breaker"); + } + return new BlockFactory(childBreaker, bigArrays, this); + } + /** * Adjust the circuit breaker with the given delta, if the delta is negative, the breaker will * be adjusted without tripping. If the data was already created before calling this method, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 03cfa2d940efd..29e39f43cddc2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -69,7 +69,7 @@ public ElementType elementType() { @Override public ConstantNullBlock filter(int... 
positions) { - return (ConstantNullBlock) blockFactory.newConstantNullBlock(positions.length); + return (ConstantNullBlock) blockFactory().newConstantNullBlock(positions.length); } public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -128,7 +128,7 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } static class Builder implements Block.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 9dc27196bd128..d45314f5c8a78 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -167,4 +167,9 @@ public void close() { Releasables.closeExpectNoException(shards, segments, docs); } } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 24c656404e89f..3097dc73fb814 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -223,6 +223,13 @@ public long ramBytesUsed() { return ramBytesEstimated(shards, segments, docs, shardSegmentDocMapForwards, shardSegmentDocMapBackwards); } + @Override + public void allowPassingToDifferentDriver() { + shards.allowPassingToDifferentDriver(); + segments.allowPassingToDifferentDriver(); + docs.allowPassingToDifferentDriver(); + } + @Override public void close() { released = true; diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java new file mode 100644 index 0000000000000..4d134963f12e7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Releasable; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Requesting and returning memory from a {@link CircuitBreaker} can be costly due to the involvement of read/write + * on one or several atomic longs. To address this issue, the local breaker adopts a strategy of over-requesting memory, + * utilizing the reserved amount for subsequent memory requests without direct access to the actual breaker. 
+ * + * @see BlockFactory#newChildFactory(LocalCircuitBreaker) + * @see Block#allowPassingToDifferentDriver() + */ +public final class LocalCircuitBreaker implements CircuitBreaker, Releasable { + private final CircuitBreaker breaker; + private final long overReservedBytes; + private final long maxOverReservedBytes; + private long reservedBytes; + private final AtomicBoolean closed = new AtomicBoolean(false); + + public record SizeSettings(long overReservedBytes, long maxOverReservedBytes) { + public SizeSettings(Settings settings) { + this( + settings.getAsBytesSize( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_DEFAULT_SIZE + ).getBytes(), + settings.getAsBytesSize( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_DEFAULT_MAX_SIZE + ).getBytes() + ); + } + } + + public LocalCircuitBreaker(CircuitBreaker breaker, long overReservedBytes, long maxOverReservedBytes) { + this.breaker = breaker; + this.maxOverReservedBytes = maxOverReservedBytes; + this.overReservedBytes = Math.min(overReservedBytes, maxOverReservedBytes); + } + + @Override + public void circuitBreak(String fieldName, long bytesNeeded) { + breaker.circuitBreak(fieldName, bytesNeeded); + } + + @Override + public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + if (bytes <= reservedBytes) { + reservedBytes -= bytes; + maybeReduceReservedBytes(); + } else { + breaker.addEstimateBytesAndMaybeBreak(bytes - reservedBytes + overReservedBytes, label); + reservedBytes = overReservedBytes; + } + } + + @Override + public void addWithoutBreaking(long bytes) { + if (bytes <= reservedBytes) { + reservedBytes -= bytes; + maybeReduceReservedBytes(); + } else { + // leave the reserve untouched as we are making a call anyway + breaker.addWithoutBreaking(bytes); + } + } + + private void maybeReduceReservedBytes() { + if (reservedBytes > maxOverReservedBytes) { + 
breaker.addWithoutBreaking(maxOverReservedBytes - reservedBytes); + reservedBytes = maxOverReservedBytes; + } + } + + public CircuitBreaker parentBreaker() { + return breaker; + } + + @Override + public long getUsed() { + return breaker.getUsed(); + } + + // for testings + long getReservedBytes() { + return reservedBytes; + } + + @Override + public long getLimit() { + return breaker.getLimit(); + } + + @Override + public double getOverhead() { + return breaker.getOverhead(); + } + + @Override + public long getTrippedCount() { + return breaker.getTrippedCount(); + } + + @Override + public String getName() { + return breaker.getName(); + } + + @Override + public Durability getDurability() { + return breaker.getDurability(); + } + + @Override + public void setLimitAndOverhead(long limit, double overhead) { + breaker.setLimitAndOverhead(limit, overhead); + } + + @Override + public void close() { + if (closed.compareAndSet(false, true)) { + breaker.addWithoutBreaking(-reservedBytes); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java index de6b5385ab167..a9903046bb4ed 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java @@ -236,7 +236,15 @@ public void releaseBlocks() { Releasables.closeExpectNoException(blocks); } - static int mapSize(int expectedSize) { - return expectedSize < 2 ? expectedSize + 1 : (int) (expectedSize / 0.75 + 1.0); + /** + * Before passing a Page to another Driver, it is necessary to switch the owning block factories of its Blocks to their parents, + * which are associated with the global circuit breaker. This ensures that when the new driver releases this Page, it returns + * memory directly to the parent block factory instead of the local block factory. 
This is important because the local block + * factory is not thread safe and doesn't support simultaneous access by more than one thread. + */ + public void allowPassingToDifferentDriver() { + for (Block block : blocks) { + block.allowPassingToDifferentDriver(); + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index e2cea86a5a38f..0ca06498f7129 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -48,8 +48,17 @@ public interface Vector extends Accountable, Releasable { boolean isConstant(); /** The block factory associated with this vector. */ + // TODO: Renaming this to owningBlockFactory BlockFactory blockFactory(); + /** + * Before passing a Vector to another Driver, it is necessary to switch the owning block factory to its parent, which is associated + * with the global circuit breaker. This ensures that when the new driver releases this Vector, it returns memory directly to the + * parent block factory instead of the local block factory of this Block. This is important because the local block factory is + * not thread safe and doesn't support simultaneous access by more than one thread. + */ + void allowPassingToDifferentDriver(); + /** * Builds {@link Vector}s. Typically, you use one of it's direct supinterfaces like {@link IntVector.Builder}. * This is {@link Releasable} and should be released after building the vector or if building the vector fails. 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 72123764e9b55..03397e1a2e5ad 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -71,7 +71,7 @@ $endif$ $if(BytesRef)$ final BytesRef scratch = new BytesRef(); $endif$ - try (var builder = blockFactory.new$Type$BlockBuilder(positions.length)) { + try (var builder = blockFactory().new$Type$BlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -108,7 +108,7 @@ $endif$ $if(BytesRef)$ final BytesRef scratch = new BytesRef(); $endif$ - try (var builder = blockFactory.new$Type$BlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().new$Type$BlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -171,10 +171,10 @@ $endif$ @Override public void closeInternal() { $if(BytesRef)$ - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); $else$ - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); $endif$ } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 02a876142fb0d..4dd903945d04f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -79,7 +79,7 @@ $endif$ $if(BytesRef)$ final var scratch = new BytesRef(); $endif$ - try ($Type$Vector.Builder builder = blockFactory.new$Type$VectorBuilder(positions.length)) { + try ($Type$Vector.Builder builder = blockFactory().new$Type$VectorBuilder(positions.length)) { for (int pos : positions) { $if(BytesRef)$ builder.append$Type$(values.get(pos, scratch)); @@ -129,7 +129,7 @@ $if(BytesRef)$ throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } $endif$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 5bf629cec61d3..6a231d9ff6bf3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -60,6 +60,7 @@ public final class $Type$BigArrayVector extends AbstractVector implements $Type$ @Override public $Type$Vector filter(int... 
positions) { + var blockFactory = blockFactory(); $if(boolean)$ final BitArray filtered = new BitArray(positions.length, blockFactory.bigArrays()); for (int i = 0; i < positions.length; i++) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index b80188cefba2e..f685d38d6459b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -108,6 +108,6 @@ $endif$ throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index 89bc84d551b63..91b6bb0ffac87 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -91,4 +91,9 @@ $endif$ assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java index 1835bea60de24..98ba37e3f32d1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.core.Releasables; import org.elasticsearch.index.seqno.LocalCheckpointTracker; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.tasks.TaskCancelledException; @@ -78,7 +77,7 @@ public void addInput(Page input) { buffers.put(seqNo, output); onSeqNoCompleted(seqNo); }, e -> { - input.releaseBlocks(); + releasePageOnAnyThread(input); onFailure(e); onSeqNoCompleted(seqNo); }); @@ -91,6 +90,11 @@ public void addInput(Page input) { } } + private void releasePageOnAnyThread(Page page) { + page.allowPassingToDifferentDriver(); + page.releaseBlocks(); + } + /** * Performs an external computation and notify the listener when the result is ready. * @@ -157,7 +161,7 @@ private void discardPages() { Page page = buffers.remove(nextCheckpoint); checkpoint.markSeqNoAsPersisted(nextCheckpoint); if (page != null) { - Releasables.closeExpectNoException(page::releaseBlocks); + releasePageOnAnyThread(page); } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java index 6d5f914b74eb3..fd70a72cc3255 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java @@ -62,7 +62,7 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { pageConsumer.accept(mapper.apply(page)); } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java index 3d53a09856c1f..ee1e61d74bfc6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java @@ -40,7 +40,7 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { pageConsumer.accept(page); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java index 93c170cbcfc8a..0751abf4562a7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java @@ -23,6 +23,15 @@ public final Page getOutput() { throw new UnsupportedOperationException(); } + protected abstract void doAddInput(Page page); + + @Override + public final void addInput(Page page) { + // We need to change the ownership of the blocks of the input page before passing them to another driver. + page.allowPassingToDifferentDriver(); + doAddInput(page); + } + /** * A factory for creating sink operators. 
*/ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java index 0fb6ec6f63d96..fed0b2de4454b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java @@ -73,10 +73,9 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { pagesAccepted++; - var newPage = transformer.apply(page); - sink.addPage(newPage); + sink.addPage(transformer.apply(page)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index 88a584ac5ee44..27b0380ecfea0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -27,12 +27,16 @@ import java.util.BitSet; import java.util.List; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -568,6 +572,82 @@ public void testReleaseVector() { assertThat(breaker.getUsed(), equalTo(0L)); } + public void testParent() { + long overLimit = between(1, 10); + long maxOverLimit = 
randomLongBetween(overLimit, 1000); + LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(blockFactory.breaker(), overLimit, maxOverLimit); + BlockFactory childFactory = blockFactory.newChildFactory(localBreaker); + assertThat(childFactory.parent(), sameInstance(blockFactory)); + assertThat(blockFactory.parent(), sameInstance(blockFactory)); + localBreaker.close(); + } + + private Block randomBlock(BlockFactory blockFactory, int positionCount) { + return BasicBlockTests.randomBlock( + blockFactory, + randomFrom(ElementType.BYTES_REF, ElementType.LONG, ElementType.BOOLEAN), + positionCount, + randomBoolean(), + between(0, 1), + between(1, 3), + between(0, 1), + between(1, 3) + ).block(); + } + + public void testAllowPassingBlockToDifferentContext() throws Exception { + long overLimit1 = between(0, 10 * 1024); + long maxOverLimit1 = randomLongBetween(overLimit1, 100 * 1024); + LocalCircuitBreaker localBreaker1 = new LocalCircuitBreaker(blockFactory.breaker(), overLimit1, maxOverLimit1); + long overLimit2 = between(0, 10 * 1024); + long maxOverLimit2 = randomLongBetween(overLimit1, 100 * 1024); + LocalCircuitBreaker localBreaker2 = new LocalCircuitBreaker(blockFactory.breaker(), overLimit2, maxOverLimit2); + BlockFactory childFactory1 = blockFactory.newChildFactory(localBreaker1); + BlockFactory childFactory2 = blockFactory.newChildFactory(localBreaker2); + Thread[] releasingThreads = new Thread[between(1, 4)]; + Page[] passedPages = new Page[releasingThreads.length]; + for (int i = 0; i < passedPages.length; i++) { + int positionCount = between(1, 100); + Block[] blocks = new Block[between(1, 10)]; + for (int b = 0; b < blocks.length; b++) { + blocks[b] = randomBlock(randomFrom(childFactory1, childFactory2), positionCount); + blocks[b].allowPassingToDifferentDriver(); + assertThat(blocks[b].blockFactory(), equalTo(blockFactory)); + } + passedPages[i] = new Page(blocks); + } + Block[] localBlocks = new Block[between(1, 100)]; + for (int i = 0; i < 
localBlocks.length; i++) { + BlockFactory childFactory = randomFrom(childFactory1, childFactory2); + localBlocks[i] = randomBlock(childFactory, between(1, 100)); + assertThat(localBlocks[i].blockFactory(), equalTo(childFactory)); + } + CyclicBarrier barrier = new CyclicBarrier(releasingThreads.length + 1); + for (int i = 0; i < releasingThreads.length; i++) { + int threadIndex = i; + releasingThreads[threadIndex] = new Thread(() -> { + try { + barrier.await(30, TimeUnit.SECONDS); + passedPages[threadIndex].releaseBlocks(); + } catch (Exception e) { + throw new AssertionError(e); + } + }); + releasingThreads[threadIndex].start(); + } + barrier.await(30, TimeUnit.SECONDS); + for (Block block : localBlocks) { + block.close(); + } + for (Thread releasingThread : releasingThreads) { + releasingThread.join(); + } + assertThat(localBreaker1.getReservedBytes(), lessThanOrEqualTo(maxOverLimit1)); + assertThat(localBreaker2.getReservedBytes(), lessThanOrEqualTo(maxOverLimit2)); + localBreaker1.close(); + localBreaker2.close(); + } + static BytesRef randomBytesRef() { return new BytesRef(randomByteArrayOfLength(between(1, 20))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java new file mode 100644 index 0000000000000..a95c78df83646 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; + +public class LocalCircuitBreakerTests extends ESTestCase { + + static class TrackingCircuitBreaker implements CircuitBreaker { + private final CircuitBreaker breaker; + private final AtomicInteger called = new AtomicInteger(); + + TrackingCircuitBreaker(CircuitBreaker breaker) { + this.breaker = breaker; + } + + @Override + public void circuitBreak(String fieldName, long bytesNeeded) { + + } + + @Override + public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + called.incrementAndGet(); + breaker.addEstimateBytesAndMaybeBreak(bytes, label); + } + + @Override + public void addWithoutBreaking(long bytes) { + called.incrementAndGet(); + breaker.addWithoutBreaking(bytes); + } + + @Override + public long getUsed() { + return breaker.getUsed(); + } + + @Override + public long getLimit() { + return breaker.getLimit(); + } + + @Override + public double getOverhead() { + return breaker.getOverhead(); + } + + @Override + public long getTrippedCount() { + return breaker.getTrippedCount(); + } + + @Override + public String getName() { + return breaker.getName(); + } + + @Override + public Durability getDurability() { + return breaker.getDurability(); + } + + @Override + public void setLimitAndOverhead(long limit, double overhead) { + breaker.setLimitAndOverhead(limit, overhead); + } + + int callTimes() { + return called.get(); + } + } + + private TrackingCircuitBreaker newTestBreaker(long limit) { + var bigArrays = new 
MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofBytes(limit)).withCircuitBreaking(); + return new TrackingCircuitBreaker(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST)); + } + + public void testBasic() { + TrackingCircuitBreaker breaker = newTestBreaker(120); + LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(breaker, 30, 50); + localBreaker.addEstimateBytesAndMaybeBreak(20, "test"); + assertThat(localBreaker.getReservedBytes(), equalTo(30L)); + assertThat(breaker.callTimes(), equalTo(1)); + assertThat(breaker.getUsed(), equalTo(50L)); + localBreaker.addWithoutBreaking(-5); + assertThat(breaker.getUsed(), equalTo(50L)); + assertThat(localBreaker.getReservedBytes(), equalTo(35L)); + localBreaker.addEstimateBytesAndMaybeBreak(25, "test"); + assertThat(breaker.getUsed(), equalTo(50L)); + assertThat(breaker.callTimes(), equalTo(1)); + assertThat(localBreaker.getReservedBytes(), equalTo(10L)); + var error = expectThrows(CircuitBreakingException.class, () -> localBreaker.addEstimateBytesAndMaybeBreak(60, "test")); + assertThat(error.getBytesWanted(), equalTo(80L)); + assertThat(breaker.callTimes(), equalTo(2)); + localBreaker.addEstimateBytesAndMaybeBreak(30, "test"); + assertThat(breaker.getUsed(), equalTo(100L)); + assertThat(localBreaker.getReservedBytes(), equalTo(30L)); + assertThat(breaker.callTimes(), equalTo(3)); + localBreaker.addWithoutBreaking(-40L); + assertThat(breaker.getUsed(), equalTo(80L)); + assertThat(localBreaker.getReservedBytes(), equalTo(50L)); + assertThat(breaker.callTimes(), equalTo(4)); + localBreaker.close(); + assertThat(breaker.getUsed(), equalTo(30L)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java index 8183a055f2998..35623b93357df 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java @@ -63,7 +63,19 @@ public void ensureAllBlocksAreReleased() { } public MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { - super(breaker, bigArrays); + this(breaker, bigArrays, null); + } + + protected MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays, BlockFactory parent) { + super(breaker, bigArrays, parent); + } + + @Override + public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { + if (childBreaker.parentBreaker() != breaker()) { + throw new IllegalStateException("Different parent breaker"); + } + return new MockBlockFactory(childBreaker, bigArrays(), this); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index 290a16f83ed38..8cd7116677fd0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -23,11 +23,13 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -69,7 +71,16 @@ public void shutdownThreadPool() 
{ } public void testBasic() { - DriverContext driverContext = driverContext(); + BlockFactory globalBlockFactory = blockFactory(); + LocalCircuitBreaker localBreaker = null; + final DriverContext driverContext; + if (randomBoolean()) { + localBreaker = new LocalCircuitBreaker(globalBlockFactory.breaker(), between(0, 1024), between(0, 4096)); + BlockFactory localFactory = new BlockFactory(localBreaker, globalBlockFactory.bigArrays()); + driverContext = new DriverContext(globalBlockFactory.bigArrays(), localFactory); + } else { + driverContext = new DriverContext(globalBlockFactory.bigArrays(), globalBlockFactory); + } int positions = randomIntBetween(0, 10_000); List ids = new ArrayList<>(positions); Map dict = new HashMap<>(); @@ -98,7 +109,7 @@ protected Page createPage(int positionOffset, int length) { }; int maxConcurrentRequests = randomIntBetween(1, 10); AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { - final LookupService lookupService = new LookupService(threadPool, driverContext.blockFactory(), dict, maxConcurrentRequests); + final LookupService lookupService = new LookupService(threadPool, globalBlockFactory, dict, maxConcurrentRequests); @Override protected void performAsync(Page inputPage, ActionListener listener) { @@ -143,10 +154,12 @@ public void doClose() { Driver driver = new Driver(driverContext, sourceOperator, intermediateOperators, outputOperator, () -> assertFalse(it.hasNext())); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 10000), future); future.actionGet(); + Releasables.close(localBreaker); } public void testStatus() { - DriverContext driverContext = driverContext(); + BlockFactory blockFactory = blockFactory(); + DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); Map> handlers = new HashMap<>(); AsyncOperator operator = new AsyncOperator(driverContext, 2) { @Override @@ -195,7 +208,16 @@ protected void 
doClose() { } public void testFailure() throws Exception { - DriverContext driverContext = driverContext(); + BlockFactory globalBlockFactory = blockFactory(); + LocalCircuitBreaker localBreaker = null; + final DriverContext driverContext; + if (randomBoolean()) { + localBreaker = new LocalCircuitBreaker(globalBlockFactory.breaker(), between(0, 1024), between(0, 4096)); + BlockFactory localFactory = new BlockFactory(localBreaker, globalBlockFactory.bigArrays()); + driverContext = new DriverContext(globalBlockFactory.bigArrays(), localFactory); + } else { + driverContext = new DriverContext(globalBlockFactory.bigArrays(), globalBlockFactory); + } final SequenceLongBlockSourceOperator sourceOperator = new SequenceLongBlockSourceOperator( driverContext.blockFactory(), LongStream.range(0, 100 * 1024) @@ -213,7 +235,7 @@ protected void doRun() { throw new ElasticsearchException("simulated"); } int positionCount = inputPage.getBlock(0).getPositionCount(); - IntBlock block = driverContext.blockFactory().newConstantIntBlockWith(between(1, 100), positionCount); + IntBlock block = globalBlockFactory.newConstantIntBlockWith(between(1, 100), positionCount); listener.onResponse(inputPage.appendPage(new Page(block))); } }; @@ -232,7 +254,7 @@ protected void doClose() { }; SinkOperator outputOperator = new PageConsumerOperator(Page::releaseBlocks); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, () -> {}); + Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 1000), future); assertBusy(() -> assertTrue(future.isDone())); if (failed.get()) { @@ -290,13 +312,13 @@ protected void doRun() { } } - protected DriverContext driverContext() { + protected BlockFactory blockFactory() { BigArrays bigArrays = new 
MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); BlockFactory factory = new MockBlockFactory(breaker, bigArrays); blockFactories.add(factory); - return new DriverContext(bigArrays, factory); + return factory; } private final List breakers = new ArrayList<>(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index f44131c006b94..74e83017e03bf 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -225,7 +225,7 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { try { assertFalse("already finished", finished); IntBlock block = page.getBlock(0); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 768353a1c8d35..9b5012e56a3ff 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; @@ -69,6 +70,16 @@ public List> getSettings() { ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueSeconds(5), Setting.Property.NodeScope + ), + Setting.byteSizeSetting( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, + ByteSizeValue.ofBytes(randomIntBetween(0, 4096)), + Setting.Property.NodeScope + ), + Setting.byteSizeSetting( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, + ByteSizeValue.ofBytes(randomIntBetween(0, 16 * 1024)), + Setting.Property.NodeScope ) ); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java index 46aaa6fab16a5..daefa8899b443 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -100,6 +101,8 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getDefault(Settings.EMPTY) ) .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 256))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 1024))) // allow reading pages from 
network can trip the circuit breaker .put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) .build(); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 3e8ac6fc3d5fb..5e1c3128d4076 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -68,6 +69,8 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getDefault(Settings.EMPTY) ) .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 256))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 1024))) // allow reading pages from network can trip the circuit breaker .put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) .build(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 0f8fd70c3016b..da305da3ea84d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -29,6 +29,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.BlockReaderFactories; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; @@ -114,6 +115,7 @@ public class EnrichLookupService { private final Executor executor; private final BigArrays bigArrays; private final BlockFactory blockFactory; + private final LocalCircuitBreaker.SizeSettings localBreakerSettings; public EnrichLookupService( ClusterService clusterService, @@ -128,6 +130,7 @@ public EnrichLookupService( this.executor = transportService.getThreadPool().executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); this.bigArrays = bigArrays; this.blockFactory = blockFactory; + this.localBreakerSettings = new LocalCircuitBreaker.SizeSettings(clusterService.getSettings()); transportService.registerRequestHandler( LOOKUP_ACTION_NAME, this.executor, @@ -238,6 +241,7 @@ private void doLookup( ActionListener listener ) { Block inputBlock = inputPage.getBlock(0); + LocalCircuitBreaker localBreaker = null; try { if (inputBlock.areAllValuesNull()) { listener.onResponse(createNullResponse(inputPage.getPositionCount(), extractFields)); @@ -246,18 +250,23 @@ private void doLookup( ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shardId, 0, AliasFilter.EMPTY); SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, SearchService.NO_TIMEOUT); listener = ActionListener.runBefore(listener, searchContext::close); + localBreaker = new LocalCircuitBreaker( + blockFactory.breaker(), + localBreakerSettings.overReservedBytes(), + localBreakerSettings.maxOverReservedBytes() + ); + DriverContext driverContext = new 
DriverContext(bigArrays, blockFactory.newChildFactory(localBreaker)); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); MappedFieldType fieldType = searchExecutionContext.getFieldType(matchField); final SourceOperator queryOperator = switch (matchType) { case "match", "range" -> { QueryList queryList = QueryList.termQueryList(fieldType, searchExecutionContext, inputBlock); - yield new EnrichQuerySourceOperator(blockFactory, queryList, searchExecutionContext.getIndexReader()); + yield new EnrichQuerySourceOperator(driverContext.blockFactory(), queryList, searchExecutionContext.getIndexReader()); } default -> throw new EsqlIllegalArgumentException("illegal match type " + matchType); }; List intermediateOperators = new ArrayList<>(extractFields.size() + 2); final ElementType[] mergingTypes = new ElementType[extractFields.size()]; - // load the fields List fields = new ArrayList<>(extractFields.size()); for (int i = 0; i < extractFields.size(); i++) { @@ -273,7 +282,7 @@ private void doLookup( } intermediateOperators.add( new ValuesSourceReaderOperator( - blockFactory, + driverContext.blockFactory(), fields, List.of(new ValuesSourceReaderOperator.ShardContext(searchContext.searcher().getIndexReader(), () -> { throw new UnsupportedOperationException("can't load _source as part of enrich"); @@ -289,19 +298,26 @@ private void doLookup( // merging field-values by position final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 1).toArray(); intermediateOperators.add( - new MergePositionsOperator(singleLeaf, inputPage.getPositionCount(), 0, mergingChannels, mergingTypes, blockFactory) + new MergePositionsOperator( + singleLeaf, + inputPage.getPositionCount(), + 0, + mergingChannels, + mergingTypes, + driverContext.blockFactory() + ) ); AtomicReference result = new AtomicReference<>(); OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); Driver driver = new 
Driver( "enrich-lookup:" + sessionId, - new DriverContext(bigArrays, blockFactory), + driverContext, () -> lookupDescription(sessionId, shardId, matchType, matchField, extractFields, inputPage.getPositionCount()), queryOperator, intermediateOperators, outputOperator, Driver.DEFAULT_STATUS_INTERVAL, - searchContext + localBreaker ); task.addListener(() -> { String reason = Objects.requireNonNullElse(task.getReasonCancelled(), "task was cancelled"); @@ -309,6 +325,7 @@ private void doLookup( }); var threadContext = transportService.getThreadPool().getThreadContext(); + localBreaker = null; Driver.start(threadContext, executor, driver, Driver.DEFAULT_MAX_ITERATIONS, listener.map(ignored -> { Page out = result.get(); if (out == null) { @@ -318,6 +335,8 @@ private void doLookup( })); } catch (Exception e) { listener.onFailure(e); + } finally { + Releasables.close(localBreaker); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index c531fd01c2a40..e4e2402a9c7a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.esql.planner; import org.apache.lucene.search.Query; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneCountOperator; import 
org.elasticsearch.compute.lucene.LuceneOperator; @@ -117,6 +119,7 @@ public class LocalExecutionPlanner { private final CancellableTask parentTask; private final BigArrays bigArrays; private final BlockFactory blockFactory; + private final Settings settings; private final EsqlConfiguration configuration; private final ExchangeSourceHandler exchangeSourceHandler; private final ExchangeSinkHandler exchangeSinkHandler; @@ -128,6 +131,7 @@ public LocalExecutionPlanner( CancellableTask parentTask, BigArrays bigArrays, BlockFactory blockFactory, + Settings settings, EsqlConfiguration configuration, ExchangeSourceHandler exchangeSourceHandler, ExchangeSinkHandler exchangeSinkHandler, @@ -138,6 +142,7 @@ public LocalExecutionPlanner( this.parentTask = parentTask; this.bigArrays = bigArrays; this.blockFactory = blockFactory; + this.settings = settings; this.exchangeSourceHandler = exchangeSourceHandler; this.exchangeSinkHandler = exchangeSinkHandler; this.enrichLookupService = enrichLookupService; @@ -154,7 +159,8 @@ public LocalExecutionPlan plan(PhysicalPlan node) { new Holder<>(DriverParallelism.SINGLE), configuration.pragmas(), bigArrays, - blockFactory + blockFactory, + settings ); // workaround for https://github.com/elastic/elasticsearch/issues/99782 @@ -165,9 +171,10 @@ public LocalExecutionPlan plan(PhysicalPlan node) { PhysicalOperation physicalOperation = plan(node, context); + final TimeValue statusInterval = configuration.pragmas().statusInterval(); context.addDriverFactory( new DriverFactory( - new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, configuration.pragmas().statusInterval()), + new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), context.driverParallelism().get() ) ); @@ -691,7 +698,8 @@ public record LocalExecutionPlannerContext( Holder driverParallelism, QueryPragmas queryPragmas, BigArrays bigArrays, - BlockFactory blockFactory + BlockFactory blockFactory, + Settings 
settings ) { void addDriverFactory(DriverFactory driverFactory) { driverFactories.add(driverFactory); @@ -715,26 +723,44 @@ int pageSize(Integer estimatedRowSize) { } } - record DriverSupplier(BigArrays bigArrays, BlockFactory blockFactory, PhysicalOperation physicalOperation, TimeValue statusInterval) - implements - Function, - Describable { + record DriverSupplier( + BigArrays bigArrays, + BlockFactory blockFactory, + PhysicalOperation physicalOperation, + TimeValue statusInterval, + Settings settings + ) implements Function, Describable { @Override public Driver apply(String sessionId) { SourceOperator source = null; List operators = new ArrayList<>(); SinkOperator sink = null; boolean success = false; - var driverContext = new DriverContext(bigArrays, blockFactory); + var localBreakerSettings = new LocalCircuitBreaker.SizeSettings(settings); + final var localBreaker = new LocalCircuitBreaker( + blockFactory.breaker(), + localBreakerSettings.overReservedBytes(), + localBreakerSettings.maxOverReservedBytes() + ); + var driverContext = new DriverContext(bigArrays, blockFactory.newChildFactory(localBreaker)); try { source = physicalOperation.source(driverContext); physicalOperation.operators(operators, driverContext); sink = physicalOperation.sink(driverContext); success = true; - return new Driver(sessionId, driverContext, physicalOperation::describe, source, operators, sink, statusInterval, () -> {}); + return new Driver( + sessionId, + driverContext, + physicalOperation::describe, + source, + operators, + sink, + statusInterval, + localBreaker + ); } finally { if (false == success) { - Releasables.close(source, () -> Releasables.close(operators), sink); + Releasables.close(source, () -> Releasables.close(operators), sink, localBreaker); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 
8d7024f7d889d..dd5ae00294ed0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; @@ -100,12 +101,14 @@ public record Result(List pages, List profiles) {} private final DriverTaskRunner driverRunner; private final ExchangeService exchangeService; private final EnrichLookupService enrichLookupService; + private final ClusterService clusterService; public ComputeService( SearchService searchService, TransportService transportService, ExchangeService exchangeService, EnrichLookupService enrichLookupService, + ClusterService clusterService, ThreadPool threadPool, BigArrays bigArrays, BlockFactory blockFactory @@ -119,6 +122,7 @@ public ComputeService( this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor); this.exchangeService = exchangeService; this.enrichLookupService = enrichLookupService; + this.clusterService = clusterService; } public void execute( @@ -278,6 +282,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, task, bigArrays, blockFactory, + clusterService.getSettings(), context.configuration, context.exchangeSource(), context.exchangeSink(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 780d812e2c23b..d272aba26e4e8 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -74,6 +74,7 @@ public TransportEsqlQueryAction( transportService, exchangeService, enrichLookupService, + clusterService, threadPool, bigArrays, blockFactory diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index dbb7c1f130a1b..17ed0c1223636 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -329,11 +329,14 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { String sessionId = "csv-test"; ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), threadPool.executor(ESQL_THREAD_POOL_NAME)); ExchangeSinkHandler exchangeSink = new ExchangeSinkHandler(between(1, 64), threadPool::relativeTimeInMillis); + Settings.Builder settings = Settings.builder(); + LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( sessionId, new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()), bigArrays, new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays), + randomNodeSettings(), configuration, exchangeSource, exchangeSink, @@ -408,6 +411,15 @@ protected void start(Driver driver, ActionListener driverListener) { } } + private Settings randomNodeSettings() { + Settings.Builder builder = Settings.builder(); + if (randomBoolean()) { + builder.put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(randomIntBetween(0, 4096))); + builder.put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(randomIntBetween(0, 16 * 1024))); + } + return builder.build(); + } + private 
Throwable reworkException(Throwable th) { StackTraceElement[] stackTrace = th.getStackTrace(); StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1]; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index a01d82731bc94..24fcae0f6bbb0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -125,6 +125,7 @@ private LocalExecutionPlanner planner() throws IOException { null, BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance(), + Settings.EMPTY, config(), null, null, From 0e37a919b0fa1ac87139f92a2c65605677b121fa Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 5 Dec 2023 17:46:50 +0100 Subject: [PATCH 168/181] Mute NestedAggregatorTests (#102998) This suite can potentially fail in most of the tests after the lucene upgrade to lucene 9.9. It is just a test failure and we will providing a fix soon but in the meanwhile lets mute it. 
relates https://github.com/elastic/elasticsearch/issues/102974 --- .../aggregations/bucket/nested/NestedAggregatorTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 83a2e856a512e..82f4597252ac9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; @@ -105,6 +106,7 @@ * prefixed with the nested path: nestedPath + "." 
+ fieldName * */ +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public class NestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; @@ -502,7 +504,6 @@ public void testNestedOrdering() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testNestedOrdering_random() throws IOException { int numBooks = randomIntBetween(32, 512); List> books = new ArrayList<>(); @@ -563,7 +564,6 @@ public void testNestedOrdering_random() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testPreGetChildLeafCollectors() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { From c603996c74492c0542b46de11b309db9958e86f1 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 5 Dec 2023 18:01:17 +0100 Subject: [PATCH 169/181] [Connectors API] Connector Sync Job List Endpoint (#102986) Add connector sync job list endpoint --- .../api/connector_sync_job.list.json | 46 +++++ .../entsearch/470_connector_sync_job_list.yml | 175 +++++++++++++++++ .../xpack/application/EnterpriseSearch.java | 5 + .../connector/syncjob/ConnectorSyncJob.java | 2 +- .../syncjob/ConnectorSyncJobIndexService.java | 110 ++++++++++- .../action/ListConnectorSyncJobsAction.java | 172 ++++++++++++++++ .../RestListConnectorSyncJobsAction.java | 49 +++++ .../TransportListConnectorSyncJobsAction.java | 63 ++++++ .../ConnectorSyncJobIndexServiceTests.java | 184 ++++++++++++++++-- .../syncjob/ConnectorSyncJobTestUtils.java | 10 + ...cJobsActionRequestBWCSerializingTests.java | 58 ++++++ ...JobsActionResponseBWCSerializingTests.java | 53 +++++ .../ListConnectorSyncJobsActionTests.java | 25 +++ ...sportListConnectorSyncJobsActionTests.java | 74 +++++++ .../xpack/security/operator/Constants.java | 1 + 15 files changed, 1008 
insertions(+), 19 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json new file mode 100644 index 0000000000000..d09a720f748ec --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json @@ -0,0 +1,46 @@ +{ + "connector_sync_job.list": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Lists all connector sync jobs." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job", + "methods": [ + "GET" + ] + } + ] + }, + "params": { + "from": { + "type": "int", + "default": 0, + "description": "Starting offset (default: 0)" + }, + "size": { + "type": "int", + "default": 100, + "description": "specifies a max number of results to get (default: 100)" + }, + "status": { + "type": "string", + "description": "Sync job status, which sync jobs are fetched for" + }, + "connector_id": { + "type": "string", + "description": "Id of the connector to fetch the sync jobs for" + } + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml new file mode 100644 index 0000000000000..e076b88528ad5 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml @@ -0,0 +1,175 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: connector-one + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + - do: + connector.put: + connector_id: connector-two + body: + index_name: search-test-two + name: my-connector-two + language: de + is_native: false + service_type: super-connector + + +--- +"List Connector Sync Jobs": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id 
} + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-three-id } + - do: + connector_sync_job.list: { } + + - match: { count: 3 } + + # Ascending order by creation_date for results + - match: { results.0.id: $sync-job-one-id } + - match: { results.1.id: $sync-job-two-id } + - match: { results.2.id: $sync-job-three-id } + +--- +"List Connector Sync Jobs - with from": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-three-id } + - do: + connector_sync_job.list: + from: 1 + + - match: { count: 3 } + + # Ascending order by creation_date for results + - match: { results.0.id: $sync-job-two-id } + - match: { results.1.id: $sync-job-three-id } + +--- +"List Connector Sync Jobs - with size": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-three-id } + - do: + connector_sync_job.list: + size: 1 + + - match: { count: 3 } + + - match: { results.0.id: $sync-job-one-id } + +--- +"List Connector Sync Jobs - Get pending jobs": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + 
body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.cancel: + connector_sync_job_id: $sync-job-two-id + - do: + connector_sync_job.list: + status: pending + - match: { count: 1 } + - match: { results.0.id: $sync-job-one-id } + +--- +"List Connector Sync Jobs - Get jobs for connector one": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-two + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.list: + connector_id: connector-one + - match: { count: 1 } + - match: { results.0.id: $sync-job-one-id } + + +--- +"List Connector Sync Jobs - empty list": + - do: + connector_sync_job.list: { } + + - match: { count: 0 } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 2119d9d0a4c30..73025c0b23b56 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -85,11 +85,13 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.ListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestCancelConnectorSyncJobAction; 
import org.elasticsearch.xpack.application.connector.syncjob.action.RestCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobIngestionStatsAction; @@ -97,6 +99,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobIngestionStatsAction; @@ -238,6 +241,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class), + new ActionHandler<>(ListConnectorSyncJobsAction.INSTANCE, 
TransportListConnectorSyncJobsAction.class), new ActionHandler<>(UpdateConnectorSyncJobErrorAction.INSTANCE, TransportUpdateConnectorSyncJobErrorAction.class), new ActionHandler<>( UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, @@ -314,6 +318,7 @@ public List getRestHandlers( new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), new RestCheckInConnectorSyncJobAction(), + new RestListConnectorSyncJobsAction(), new RestUpdateConnectorSyncJobErrorAction(), new RestUpdateConnectorSyncJobIngestionStatsAction() ) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 211a423dab99e..f14d0fa52b1c7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -93,7 +93,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField STARTED_AT_FIELD = new ParseField("started_at"); - static final ParseField STATUS_FIELD = new ParseField("status"); + public static final ParseField STATUS_FIELD = new ParseField("status"); public static final ParseField TOTAL_DOCUMENT_COUNT_FIELD = new ParseField("total_document_count"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 2c9ac7c06b91c..326fdb0367e5c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -18,6 +18,8 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; @@ -26,6 +28,13 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.DocumentMissingException; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; @@ -40,11 +49,14 @@ import java.io.IOException; import java.time.Instant; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; +import java.util.stream.Stream; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; @@ -252,11 +264,103 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener listener + ) { + try { + QueryBuilder query = buildListQuery(connectorId, syncStatus); + + final SearchSourceBuilder searchSource = new SearchSourceBuilder().from(from) + 
.size(size) + .query(query) + .fetchSource(true) + .sort(ConnectorSyncJob.CREATED_AT_FIELD.getPreferredName(), SortOrder.ASC); + + final SearchRequest searchRequest = new SearchRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).source(searchSource); + + clientWithOrigin.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse searchResponse) { + try { + listener.onResponse(mapSearchResponseToConnectorSyncJobsList(searchResponse)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + if (e instanceof IndexNotFoundException) { + listener.onResponse(new ConnectorSyncJobIndexService.ConnectorSyncJobsResult(Collections.emptyList(), 0L)); + return; + } + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } + + private static QueryBuilder buildListQuery(String connectorId, ConnectorSyncStatus syncStatus) { + boolean usesFilter = Stream.of(connectorId, syncStatus).anyMatch(Objects::nonNull); + BoolQueryBuilder boolFilterQueryBuilder = new BoolQueryBuilder(); + + if (usesFilter) { + if (Objects.nonNull(connectorId)) { + TermQueryBuilder connectorIdQuery = new TermQueryBuilder( + ConnectorSyncJob.CONNECTOR_FIELD.getPreferredName() + "." + Connector.ID_FIELD.getPreferredName(), + connectorId + ); + boolFilterQueryBuilder.must().add(connectorIdQuery); + } + + if (Objects.nonNull(syncStatus)) { + TermQueryBuilder syncStatusQuery = new TermQueryBuilder(ConnectorSyncJob.STATUS_FIELD.getPreferredName(), syncStatus); + boolFilterQueryBuilder.must().add(syncStatusQuery); + } + } + + return usesFilter ? 
boolFilterQueryBuilder : new MatchAllQueryBuilder(); + } + + private ConnectorSyncJobsResult mapSearchResponseToConnectorSyncJobsList(SearchResponse searchResponse) { + final List connectorSyncJobs = Arrays.stream(searchResponse.getHits().getHits()) + .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) + .toList(); + + return new ConnectorSyncJobIndexService.ConnectorSyncJobsResult( + connectorSyncJobs, + (int) searchResponse.getHits().getTotalHits().value + ); + } + + private static ConnectorSyncJob hitToConnectorSyncJob(SearchHit searchHit) { + // TODO: don't return sensitive data from configuration inside connector in list endpoint + + return ConnectorSyncJob.fromXContentBytes(searchHit.getSourceRef(), XContentType.JSON); + } + + public record ConnectorSyncJobsResult(List connectorSyncJobs, long totalResults) {} + + /** + * Updates the ingestion stats of the {@link ConnectorSyncJob} in the underlying index. + * + * @param request Request containing the updates to the ingestion stats. + * @param listener The action listener to invoke on response/failure. + */ public void updateConnectorSyncJobIngestionStats( UpdateConnectorSyncJobIngestionStatsAction.Request request, ActionListener listener diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java new file mode 100644 index 0000000000000..0a22b6f938142 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.action.util.QueryPage; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class ListConnectorSyncJobsAction extends ActionType { + + public static final ListConnectorSyncJobsAction INSTANCE = new ListConnectorSyncJobsAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/list"; + + public ListConnectorSyncJobsAction() { + super(NAME, ListConnectorSyncJobsAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + public static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); + private static final ParseField PAGE_PARAMS_FIELD = new ParseField("pageParams"); + private final PageParams pageParams; + private final String connectorId; + private final ConnectorSyncStatus connectorSyncStatus; + + public Request(StreamInput in) throws IOException { + 
super(in); + this.pageParams = new PageParams(in); + this.connectorId = in.readOptionalString(); + this.connectorSyncStatus = in.readOptionalEnum(ConnectorSyncStatus.class); + } + + public Request(PageParams pageParams, String connectorId, ConnectorSyncStatus connectorSyncStatus) { + this.pageParams = pageParams; + this.connectorId = connectorId; + this.connectorSyncStatus = connectorSyncStatus; + } + + public PageParams getPageParams() { + return pageParams; + } + + public String getConnectorId() { + return connectorId; + } + + public ConnectorSyncStatus getConnectorSyncStatus() { + return connectorSyncStatus; + } + + @Override + public ActionRequestValidationException validate() { + // Pagination validation is done as part of PageParams constructor + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + pageParams.writeTo(out); + out.writeOptionalString(connectorId); + out.writeOptionalEnum(connectorSyncStatus); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(pageParams, request.pageParams) + && Objects.equals(connectorId, request.connectorId) + && connectorSyncStatus == request.connectorSyncStatus; + } + + @Override + public int hashCode() { + return Objects.hash(pageParams, connectorId, connectorSyncStatus); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "list_connector_sync_jobs_request", + p -> new ListConnectorSyncJobsAction.Request( + (PageParams) p[0], + (String) p[1], + p[2] != null ? 
ConnectorSyncStatus.fromString((String) p[2]) : null + ) + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> PageParams.fromXContent(p), PAGE_PARAMS_FIELD); + PARSER.declareString(optionalConstructorArg(), CONNECTOR_ID_FIELD); + PARSER.declareString(optionalConstructorArg(), ConnectorSyncJob.STATUS_FIELD); + } + + public static ListConnectorSyncJobsAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(PAGE_PARAMS_FIELD.getPreferredName(), pageParams); + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + builder.field(ConnectorSyncJob.STATUS_FIELD.getPreferredName(), connectorSyncStatus); + } + builder.endObject(); + return builder; + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + public static final ParseField RESULTS_FIELD = new ParseField("results"); + + final QueryPage queryPage; + + public Response(StreamInput in) throws IOException { + super(in); + this.queryPage = new QueryPage<>(in, ConnectorSyncJob::new); + } + + public Response(List items, Long totalResults) { + this.queryPage = new QueryPage<>(items, totalResults, RESULTS_FIELD); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + queryPage.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return queryPage.toXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(queryPage, response.queryPage); + } + + @Override + public int hashCode() { + return Objects.hash(queryPage); + } + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java new file mode 100644 index 0000000000000..ef8851636be1b --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.core.action.util.PageParams; + +import java.io.IOException; +import java.util.List; + +public class RestListConnectorSyncJobsAction extends BaseRestHandler { + @Override + public String getName() { + return "connector_sync_jobs_list_action"; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + int from = restRequest.paramAsInt("from", PageParams.DEFAULT_FROM); + int size = restRequest.paramAsInt("size", PageParams.DEFAULT_SIZE); + String connectorId = 
restRequest.param(ListConnectorSyncJobsAction.Request.CONNECTOR_ID_FIELD.getPreferredName()); + String statusString = restRequest.param(ConnectorSyncJob.STATUS_FIELD.getPreferredName()); + ConnectorSyncStatus status = statusString != null ? ConnectorSyncStatus.fromString(statusString) : null; + + ListConnectorSyncJobsAction.Request request = new ListConnectorSyncJobsAction.Request( + new PageParams(from, size), + connectorId, + status + ); + + return channel -> client.execute(ListConnectorSyncJobsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java new file mode 100644 index 0000000000000..4ba662f77f8fa --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; +import org.elasticsearch.xpack.core.action.util.PageParams; + +public class TransportListConnectorSyncJobsAction extends HandledTransportAction< + ListConnectorSyncJobsAction.Request, + ListConnectorSyncJobsAction.Response> { + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportListConnectorSyncJobsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + ListConnectorSyncJobsAction.NAME, + transportService, + actionFilters, + ListConnectorSyncJobsAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute( + Task task, + ListConnectorSyncJobsAction.Request request, + ActionListener listener + ) { + final PageParams pageParams = request.getPageParams(); + final String connectorId = request.getConnectorId(); + final ConnectorSyncStatus syncStatus = request.getConnectorSyncStatus(); + + connectorSyncJobIndexService.listConnectorSyncJobs( + pageParams.getFrom(), + pageParams.getSize(), + connectorId, + syncStatus, + listener.map(r -> new 
ListConnectorSyncJobsAction.Response(r.connectorSyncJobs(), r.totalResults())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index ab16fb8a46eb0..85d8826b98683 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -31,8 +31,10 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.junit.Before; +import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -60,12 +62,21 @@ public class ConnectorSyncJobIndexServiceTests extends ESSingleNodeTestCase { private static final int ONE_SECOND_IN_MILLIS = 1000; private ConnectorSyncJobIndexService connectorSyncJobIndexService; - private Connector connector; + private Connector connectorOne; + private Connector connectorTwo; @Before public void setup() throws Exception { - connector = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); + connectorOne = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); + connectorTwo = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); + createConnector(connectorOne); + createConnector(connectorTwo); + + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client()); + } + + private void createConnector(Connector connector) throws IOException, InterruptedException, ExecutionException, TimeoutException { final IndexRequest indexRequest = new 
IndexRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connector.getConnectorId()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -74,13 +85,11 @@ public void setup() throws Exception { // wait 10 seconds for connector creation index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS); - - this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client()); } public void testCreateConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); ConnectorSyncJobType requestJobType = syncJobRequest.getJobType(); ConnectorSyncJobTriggerMethod requestTriggerMethod = syncJobRequest.getTriggerMethod(); @@ -101,7 +110,7 @@ public void testCreateConnectorSyncJob() throws Exception { public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeToBeSet() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( - connector.getConnectorId(), + connectorOne.getConnectorId(), null, ConnectorSyncJobTriggerMethod.ON_DEMAND ); @@ -114,7 +123,7 @@ public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeTo public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTriggerMethodToBeSet() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( - connector.getConnectorId(), + connectorOne.getConnectorId(), ConnectorSyncJobType.FULL, null ); @@ -139,7 +148,7 @@ public void testCreateConnectorSyncJob_WithMissingConnectorId_ExpectException() public void testDeleteConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response 
response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -157,7 +166,7 @@ public void testDeleteConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testGetConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); ConnectorSyncJobType jobType = syncJobRequest.getJobType(); ConnectorSyncJobTriggerMethod triggerMethod = syncJobRequest.getTriggerMethod(); @@ -170,7 +179,7 @@ public void testGetConnectorSyncJob() throws Exception { assertThat(syncJob.getId(), equalTo(syncJobId)); assertThat(syncJob.getJobType(), equalTo(jobType)); assertThat(syncJob.getTriggerMethod(), equalTo(triggerMethod)); - assertThat(syncJob.getConnector().getConnectorId(), equalTo(connector.getConnectorId())); + assertThat(syncJob.getConnector().getConnectorId(), equalTo(connectorOne.getConnectorId())); } public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { @@ -179,7 +188,7 @@ public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testCheckInConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -218,7 +227,7 @@ public void testCheckInConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testCancelConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String 
syncJobId = response.getId(); @@ -254,9 +263,122 @@ public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testListConnectorSyncJobs() throws Exception { + int numberOfSyncJobs = 5; + List syncJobs = new ArrayList<>(); + + for (int i = 0; i < numberOfSyncJobs; i++) { + PostConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOne.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(request); + ConnectorSyncJob syncJob = awaitGetConnectorSyncJob(response.getId()); + syncJobs.add(syncJob); + } + + ConnectorSyncJobIndexService.ConnectorSyncJobsResult firstTwoSyncJobs = awaitListConnectorSyncJobs(0, 2, null, null); + ConnectorSyncJobIndexService.ConnectorSyncJobsResult nextTwoSyncJobs = awaitListConnectorSyncJobs(2, 2, null, null); + ConnectorSyncJobIndexService.ConnectorSyncJobsResult lastSyncJobs = awaitListConnectorSyncJobs(4, 100, null, null); + + ConnectorSyncJob firstSyncJob = firstTwoSyncJobs.connectorSyncJobs().get(0); + ConnectorSyncJob secondSyncJob = firstTwoSyncJobs.connectorSyncJobs().get(1); + ConnectorSyncJob thirdSyncJob = nextTwoSyncJobs.connectorSyncJobs().get(0); + ConnectorSyncJob fourthSyncJob = nextTwoSyncJobs.connectorSyncJobs().get(1); + ConnectorSyncJob fifthSyncJob = lastSyncJobs.connectorSyncJobs().get(0); + + assertThat(firstTwoSyncJobs.connectorSyncJobs().size(), equalTo(2)); + assertThat(firstTwoSyncJobs.totalResults(), equalTo(5L)); + + assertThat(nextTwoSyncJobs.connectorSyncJobs().size(), equalTo(2)); + assertThat(nextTwoSyncJobs.totalResults(), equalTo(5L)); + + assertThat(lastSyncJobs.connectorSyncJobs().size(), equalTo(1)); + assertThat(lastSyncJobs.totalResults(), equalTo(5L)); + + assertThat(firstSyncJob, equalTo(syncJobs.get(0))); + assertThat(secondSyncJob, 
equalTo(syncJobs.get(1))); + assertThat(thirdSyncJob, equalTo(syncJobs.get(2))); + assertThat(fourthSyncJob, equalTo(syncJobs.get(3))); + assertThat(fifthSyncJob, equalTo(syncJobs.get(4))); + + // assert ordering: ascending order by creation date + assertTrue(fifthSyncJob.getCreatedAt().isAfter(fourthSyncJob.getCreatedAt())); + assertTrue(fourthSyncJob.getCreatedAt().isAfter(thirdSyncJob.getCreatedAt())); + assertTrue(thirdSyncJob.getCreatedAt().isAfter(secondSyncJob.getCreatedAt())); + assertTrue(secondSyncJob.getCreatedAt().isAfter(firstSyncJob.getCreatedAt())); + } + + public void testListConnectorSyncJobs_WithStatusPending_GivenOnePendingTwoCancelled_ExpectOnePending() throws Exception { + String connectorId = connectorOne.getConnectorId(); + + PostConnectorSyncJobAction.Request requestOne = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); + PostConnectorSyncJobAction.Request requestTwo = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); + PostConnectorSyncJobAction.Request requestThree = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); + + PostConnectorSyncJobAction.Response responseOne = awaitPutConnectorSyncJob(requestOne); + PostConnectorSyncJobAction.Response responseTwo = awaitPutConnectorSyncJob(requestTwo); + PostConnectorSyncJobAction.Response responseThree = awaitPutConnectorSyncJob(requestThree); + + String syncJobOneId = responseOne.getId(); + String syncJobTwoId = responseTwo.getId(); + String syncJobThreeId = responseThree.getId(); + + // cancel sync job two and three -> one pending left + awaitCancelConnectorSyncJob(syncJobTwoId); + awaitCancelConnectorSyncJob(syncJobThreeId); + + ConnectorSyncJobIndexService.ConnectorSyncJobsResult connectorSyncJobsResult = awaitListConnectorSyncJobs( + 0, + 100, + null, + ConnectorSyncStatus.PENDING + ); + long numberOfResults = connectorSyncJobsResult.totalResults(); + String idOfReturnedSyncJob = 
connectorSyncJobsResult.connectorSyncJobs().get(0).getId(); + + assertThat(numberOfResults, equalTo(1L)); + assertThat(idOfReturnedSyncJob, equalTo(syncJobOneId)); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/enterprise-search-team/issues/6351") + public void testListConnectorSyncJobs_WithConnectorOneId_GivenTwoOverallOneFromConnectorOne_ExpectOne() throws Exception { + String connectorOneId = connectorOne.getConnectorId(); + String connectorTwoId = connectorTwo.getConnectorId(); + + PostConnectorSyncJobAction.Request requestOne = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOneId + ); + PostConnectorSyncJobAction.Request requestTwo = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorTwoId + ); + + awaitPutConnectorSyncJob(requestOne); + awaitPutConnectorSyncJob(requestTwo); + + ConnectorSyncJobIndexService.ConnectorSyncJobsResult connectorSyncJobsResult = awaitListConnectorSyncJobs( + 0, + 100, + connectorOneId, + null + ); + + long numberOfResults = connectorSyncJobsResult.totalResults(); + String connectorIdOfReturnedSyncJob = connectorSyncJobsResult.connectorSyncJobs().get(0).getConnector().getConnectorId(); + + assertThat(numberOfResults, equalTo(1L)); + assertThat(connectorIdOfReturnedSyncJob, equalTo(connectorOneId)); + } + + public void testListConnectorSyncJobs_WithNoSyncJobs_ReturnEmptyResult() throws Exception { + ConnectorSyncJobIndexService.ConnectorSyncJobsResult firstOneHundredSyncJobs = awaitListConnectorSyncJobs(0, 100, null, null); + + assertThat(firstOneHundredSyncJobs.connectorSyncJobs().size(), equalTo(0)); + assertThat(firstOneHundredSyncJobs.totalResults(), equalTo(0L)); + } + public void testUpdateConnectorSyncJobError() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response 
response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -285,7 +407,7 @@ public void testUpdateConnectorSyncJobError_WithMissingSyncJobId_ExceptException public void testUpdateConnectorSyncJobIngestionStats() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -329,7 +451,7 @@ public void testUpdateConnectorSyncJobIngestionStats() throws Exception { public void testUpdateConnectorSyncJobIngestionStats_WithoutLastSeen_ExpectUpdateOfLastSeen() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -465,6 +587,38 @@ private static void assertFieldsDidNotUpdateExceptFieldList( } } + private ConnectorSyncJobIndexService.ConnectorSyncJobsResult awaitListConnectorSyncJobs( + int from, + int size, + String connectorId, + ConnectorSyncStatus syncStatus + ) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference result = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + + connectorSyncJobIndexService.listConnectorSyncJobs(from, size, connectorId, syncStatus, new ActionListener<>() { + @Override + public void onResponse(ConnectorSyncJobIndexService.ConnectorSyncJobsResult connectorSyncJobsResult) { + result.set(connectorSyncJobsResult); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for list request", 
latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from list request", result.get()); + return result.get(); + } + private UpdateResponse awaitUpdateConnectorSyncJob(String syncJobId, String error) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index fb412db168605..96a12c9efac51 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -13,9 +13,11 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.ListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; +import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import java.time.Instant; @@ -146,4 +148,12 @@ public static GetConnectorSyncJobAction.Request getRandomGetConnectorSyncJobRequ public static GetConnectorSyncJobAction.Response getRandomGetConnectorSyncJobResponse() { return 
new GetConnectorSyncJobAction.Response(getRandomConnectorSyncJob()); } + + public static ListConnectorSyncJobsAction.Request getRandomListConnectorSyncJobsActionRequest() { + return new ListConnectorSyncJobsAction.Request( + SearchApplicationTestUtils.randomPageParams(), + randomAlphaOfLength(10), + ConnectorTestUtils.getRandomSyncStatus() + ); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..52ad207d18ffd --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class ListConnectorSyncJobsActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + ListConnectorSyncJobsAction.Request> { + @Override + protected Writeable.Reader instanceReader() { + return ListConnectorSyncJobsAction.Request::new; + } + + @Override + protected ListConnectorSyncJobsAction.Request createTestInstance() { + PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); + String connectorId = randomAlphaOfLength(10); + ConnectorSyncStatus syncStatus = ConnectorTestUtils.getRandomSyncStatus(); + + return new ListConnectorSyncJobsAction.Request(pageParams, connectorId, syncStatus); + } + + @Override + protected ListConnectorSyncJobsAction.Request mutateInstance(ListConnectorSyncJobsAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected ListConnectorSyncJobsAction.Request doParseInstance(XContentParser parser) throws IOException { + return ListConnectorSyncJobsAction.Request.parse(parser); + } + + @Override + protected ListConnectorSyncJobsAction.Request mutateInstanceForVersion( + ListConnectorSyncJobsAction.Request instance, + TransportVersion version + ) { + return new ListConnectorSyncJobsAction.Request( + instance.getPageParams(), + instance.getConnectorId(), + instance.getConnectorSyncStatus() + ); + } +} diff 
--git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..48a358ad043cd --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.List; + +public class ListConnectorSyncJobsActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + ListConnectorSyncJobsAction.Response> { + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new))); + } + + @Override + protected Writeable.Reader instanceReader() { + return ListConnectorSyncJobsAction.Response::new; + } + + @Override + protected ListConnectorSyncJobsAction.Response createTestInstance() { + return new ListConnectorSyncJobsAction.Response( + 
randomList(10, ConnectorSyncJobTestUtils::getRandomConnectorSyncJob), + randomLongBetween(0, 100) + ); + } + + @Override + protected ListConnectorSyncJobsAction.Response mutateInstance(ListConnectorSyncJobsAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected ListConnectorSyncJobsAction.Response mutateInstanceForVersion( + ListConnectorSyncJobsAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java new file mode 100644 index 0000000000000..39a7551bdfcab --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.nullValue; + +public class ListConnectorSyncJobsActionTests extends ESTestCase { + + public void testValidate_WhenPageParamsAreValid_ExpectNoValidationError() { + ListConnectorSyncJobsAction.Request request = ConnectorSyncJobTestUtils.getRandomListConnectorSyncJobsActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java new file mode 100644 index 0000000000000..503e7e54255e3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportListConnectorSyncJobsActionTests extends ESSingleNodeTestCase { + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportListConnectorSyncJobsAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportListConnectorSyncJobsAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testListConnectorSyncJobs_ExpectNoWarnings() throws InterruptedException { + ListConnectorSyncJobsAction.Request request = ConnectorSyncJobTestUtils.getRandomListConnectorSyncJobsActionRequest(); + + executeRequest(request); + 
+ ensureNoWarnings(); + } + + private void executeRequest(ListConnectorSyncJobsAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for list request", requestTimedOut); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index ccefd8ab6bdb7..6e78eb2fb5b83 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -140,6 +140,7 @@ public class Constants { "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/get", + "cluster:admin/xpack/connector/sync_job/list", "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/update_error", "cluster:admin/xpack/connector/sync_job/update_stats", From 3a7417e3d93c0806a41031953fe0005fd530a627 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 5 Dec 2023 18:39:32 +0100 Subject: [PATCH 170/181] Move a couple more ActionListener.wrap to ActionListener.delegateFailureAndWrap (#102965) It's in the title. Save a little more in allocations and make the behavior easier to reason about. 
--- .../reindex/TransportRethrottleAction.java | 6 +- .../TransportRethrottleActionTests.java | 3 + .../action/search/TransportSearchAction.java | 8 +- .../upgrades/SystemIndexMigrator.java | 83 +++++++++---------- .../xpack/ccr/action/CcrRequests.java | 14 ++-- .../AbstractTransportSetResetModeAction.java | 14 ++-- .../xpack/core/ilm/DownsampleStep.java | 2 +- .../SwapAliasesAndDeleteSourceIndexStep.java | 6 +- .../xpack/core/ilm/UpdateSettingsStep.java | 4 +- .../core/ml/annotations/AnnotationIndex.java | 15 ++-- .../persistence/AnomalyDetectorsIndex.java | 6 +- .../persistence/ElasticsearchMappings.java | 8 +- .../xpack/core/ml/utils/MlIndexAndAlias.java | 24 ++---- .../ml/utils/MlPlatformArchitecturesUtil.java | 12 +-- .../core/ml/utils/MlIndexAndAliasTests.java | 3 + .../TransformDeprecationChecker.java | 33 ++++---- .../TransportDeprecationInfoAction.java | 14 ++-- .../action/InternalExecutePolicyAction.java | 5 +- .../TransportDeleteEnrichPolicyAction.java | 28 ++++--- .../TransportPutEnrichPolicyAction.java | 8 +- .../syncjob/ConnectorSyncJobIndexService.java | 7 +- ...ortRenderSearchApplicationQueryAction.java | 6 +- .../xpack/esql/session/EsqlSession.java | 2 +- .../action/TransportDeleteSecretAction.java | 8 +- .../action/TransportGetSecretAction.java | 8 +- .../action/TransportPostSecretAction.java | 5 +- ...nsportDeleteSamlServiceProviderAction.java | 13 ++- .../ApplicationActionsResolver.java | 6 +- .../idp/privileges/UserPrivilegeResolver.java | 20 ++--- .../saml/authn/SamlAuthnRequestValidator.java | 30 ++++--- .../idp/saml/idp/SamlIdentityProvider.java | 10 +-- .../idp/saml/idp/SamlMetadataGenerator.java | 10 +-- .../services/elser/ElserMlNodeService.java | 10 ++- .../action/TransportGetPipelineAction.java | 9 +- .../action/TransportPutPipelineAction.java | 7 +- .../TransportDeleteCalendarEventAction.java | 2 +- ...ansportDeleteDataFrameAnalyticsAction.java | 11 ++- .../TransportDeleteExpiredDataAction.java | 9 +- 
.../ml/action/TransportDeleteJobAction.java | 8 +- .../TransportEvaluateDataFrameAction.java | 6 +- .../action/TransportGetDatafeedsAction.java | 2 +- .../action/TransportUpdateFilterAction.java | 8 +- 42 files changed, 242 insertions(+), 251 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java index bc89928358dc2..68e7d14038b67 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java @@ -101,10 +101,10 @@ private static void rethrottleParentTask( subRequest.setRequestsPerSecond(newRequestsPerSecond / runningSubtasks); subRequest.setTargetParentTaskId(new TaskId(localNodeId, task.getId())); logger.debug("rethrottling children of task [{}] to [{}] requests per second", task.getId(), subRequest.getRequestsPerSecond()); - client.execute(ReindexPlugin.RETHROTTLE_ACTION, subRequest, ActionListener.wrap(r -> { + client.execute(ReindexPlugin.RETHROTTLE_ACTION, subRequest, listener.delegateFailureAndWrap((l, r) -> { r.rethrowFailures("Rethrottle"); - listener.onResponse(task.taskInfoGivenSubtaskInfo(localNodeId, r.getTasks())); - }, listener::onFailure)); + l.onResponse(task.taskInfoGivenSubtaskInfo(localNodeId, r.getTasks())); + })); } else { logger.debug("children of task [{}] are already finished, nothing to rethrottle", task.getId()); listener.onResponse(task.taskInfo(localNodeId, true)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java index 80af095005c9d..ec8e0ce87ee56 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java @@ -34,9 +34,11 @@ import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.theInstance; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.any; import static org.mockito.Mockito.atMost; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportRethrottleActionTests extends ESTestCase { private int slices; @@ -65,6 +67,7 @@ private void rethrottleTestCase( float newRequestsPerSecond = randomValueOtherThanMany(f -> f <= 0, () -> randomFloat()); @SuppressWarnings("unchecked") ActionListener listener = mock(ActionListener.class); + when(listener.delegateFailureAndWrap(any())).thenCallRealMethod(); TransportRethrottleAction.rethrottle(logger, localNodeId, client, task, newRequestsPerSecond, listener); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 1b3b321a530e6..f164e3342fb60 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -710,7 +710,7 @@ Map createFinalResponse() { remoteClusterService.maybeEnsureConnectedAndGetConnection( clusterAlias, skipUnavailable == false, - ActionListener.wrap(connection -> { + singleListener.delegateFailureAndWrap((delegate, connection) -> { final String[] indices = entry.getValue().indices(); final Executor responseExecutor = transportService.getThreadPool().executor(ThreadPool.Names.SEARCH_COORDINATION); // TODO: support point-in-time @@ -729,7 +729,7 @@ Map createFinalResponse() { TransportSearchShardsAction.TYPE.name(), searchShardsRequest, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(singleListener, 
SearchShardsResponse::new, responseExecutor) + new ActionListenerResponseHandler<>(delegate, SearchShardsResponse::new, responseExecutor) ); } else { // does not do a can-match @@ -742,13 +742,13 @@ Map createFinalResponse() { searchShardsRequest, TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>( - singleListener.map(SearchShardsResponse::fromLegacyResponse), + delegate.map(SearchShardsResponse::fromLegacyResponse), ClusterSearchShardsResponse::new, responseExecutor ) ); } - }, singleListener::onFailure) + }) ); } } diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index 728eae67f22cd..968e64fcc3888 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -30,10 +30,10 @@ import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetadataUpdateSettingsService; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -418,7 +418,7 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer innerListener = ActionListener.wrap(listener::accept, this::markAsFailed); try { - createIndex(migrationInfo, ActionListener.wrap(shardsAcknowledgedResponse -> { + createIndex(migrationInfo, innerListener.delegateFailureAndWrap((delegate, shardsAcknowledgedResponse) -> { logger.debug( "while migrating [{}] , got create index response: [{}]", oldIndexName, @@ -427,45 +427,45 @@ private void 
migrateSingleIndex(ClusterState clusterState, Consumer reindex(migrationInfo, ActionListener.wrap(bulkByScrollResponse -> { - logger.debug( - "while migrating [{}], got reindex response: [{}]", - oldIndexName, - Strings.toString(bulkByScrollResponse) - ); - if ((bulkByScrollResponse.getBulkFailures() != null && bulkByScrollResponse.getBulkFailures().isEmpty() == false) - || (bulkByScrollResponse.getSearchFailures() != null - && bulkByScrollResponse.getSearchFailures().isEmpty() == false)) { - removeReadOnlyBlockOnReindexFailure( - oldIndex, - innerListener, - logAndThrowExceptionForFailures(bulkByScrollResponse) + delegate.delegateFailureAndWrap( + (delegate2, setReadOnlyResponse) -> reindex(migrationInfo, ActionListener.wrap(bulkByScrollResponse -> { + logger.debug( + "while migrating [{}], got reindex response: [{}]", + oldIndexName, + Strings.toString(bulkByScrollResponse) ); - } else { - // Successful completion of reindexing - remove read only and delete old index - setWriteBlock( - oldIndex, - false, - ActionListener.wrap( - setAliasAndRemoveOldIndex(migrationInfo, bulkByScrollResponse, innerListener), - innerListener::onFailure - ) + if ((bulkByScrollResponse.getBulkFailures() != null + && bulkByScrollResponse.getBulkFailures().isEmpty() == false) + || (bulkByScrollResponse.getSearchFailures() != null + && bulkByScrollResponse.getSearchFailures().isEmpty() == false)) { + removeReadOnlyBlockOnReindexFailure( + oldIndex, + delegate2, + logAndThrowExceptionForFailures(bulkByScrollResponse) + ); + } else { + // Successful completion of reindexing - remove read only and delete old index + setWriteBlock( + oldIndex, + false, + delegate2.delegateFailureAndWrap(setAliasAndRemoveOldIndex(migrationInfo, bulkByScrollResponse)) + ); + } + }, e -> { + logger.error( + () -> format( + "error occurred while reindexing index [%s] from feature [%s] to destination index [%s]", + oldIndexName, + migrationInfo.getFeatureName(), + newIndexName + ), + e ); - } - }, e -> { - 
logger.error( - () -> format( - "error occurred while reindexing index [%s] from feature [%s] to destination index [%s]", - oldIndexName, - migrationInfo.getFeatureName(), - newIndexName - ), - e - ); - removeReadOnlyBlockOnReindexFailure(oldIndex, innerListener, e); - })), innerListener::onFailure) + removeReadOnlyBlockOnReindexFailure(oldIndex, delegate2, e); + })) + ) ); - }, innerListener::onFailure)); + })); } catch (Exception ex) { logger.error( () -> format( @@ -501,10 +501,9 @@ private void createIndex(SystemIndexMigrationInfo migrationInfo, ActionListener< metadataCreateIndexService.createIndex(createRequest, listener); } - private CheckedConsumer setAliasAndRemoveOldIndex( + private CheckedBiConsumer, AcknowledgedResponse, Exception> setAliasAndRemoveOldIndex( SystemIndexMigrationInfo migrationInfo, - BulkByScrollResponse bulkByScrollResponse, - ActionListener listener + BulkByScrollResponse bulkByScrollResponse ) { final IndicesAliasesRequestBuilder aliasesRequest = migrationInfo.createClient(baseClient).admin().indices().prepareAliases(); aliasesRequest.removeIndex(migrationInfo.getCurrentIndexName()); @@ -526,7 +525,7 @@ private CheckedConsumer setAliasAndRemoveOldInd // Technically this callback might have a different cluster state, but it shouldn't matter - these indices shouldn't be changing // while we're trying to migrate them. 
- return unsetReadOnlyResponse -> aliasesRequest.execute( + return (listener, unsetReadOnlyResponse) -> aliasesRequest.execute( listener.delegateFailureAndWrap((l, deleteIndexResponse) -> l.onResponse(bulkByScrollResponse)) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java index d91d989068203..815a61297767a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java @@ -64,11 +64,11 @@ public static void getIndexMetadata( if (metadataVersion > 0) { request.waitForMetadataVersion(metadataVersion).waitForTimeout(timeoutSupplier.get()); } - client.admin().cluster().state(request, ActionListener.wrap(response -> { + client.admin().cluster().state(request, listener.delegateFailureAndWrap((delegate, response) -> { if (response.getState() == null) { // timeout on wait_for_metadata_version assert metadataVersion > 0 : metadataVersion; if (timeoutSupplier.get().nanos() < 0) { - listener.onFailure( + delegate.onFailure( new IllegalStateException( "timeout to get cluster state with" + " metadata version [" @@ -79,25 +79,25 @@ public static void getIndexMetadata( ) ); } else { - getIndexMetadata(client, index, mappingVersion, metadataVersion, timeoutSupplier, listener); + getIndexMetadata(client, index, mappingVersion, metadataVersion, timeoutSupplier, delegate); } } else { final Metadata metadata = response.getState().metadata(); final IndexMetadata indexMetadata = metadata.getIndexSafe(index); if (indexMetadata.getMappingVersion() >= mappingVersion) { - listener.onResponse(indexMetadata); + delegate.onResponse(indexMetadata); return; } if (timeoutSupplier.get().nanos() < 0) { - listener.onFailure( + delegate.onFailure( new IllegalStateException("timeout to get cluster state with mapping version [" + mappingVersion 
+ "]") ); } else { // ask for the next version. - getIndexMetadata(client, index, mappingVersion, metadata.version() + 1, timeoutSupplier, listener); + getIndexMetadata(client, index, mappingVersion, metadata.version() + 1, timeoutSupplier, delegate); } } - }, listener::onFailure)); + })); } public static final RequestValidators.RequestValidator CCR_PUT_MAPPING_REQUEST_VALIDATOR = ( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java index 0d3c45ccedd3d..f94d7c6caae36 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java @@ -91,13 +91,15 @@ protected void masterOperation( listener.onFailure(e); }); - ActionListener clusterStateUpdateListener = ActionListener.wrap(acknowledgedResponse -> { - if (acknowledgedResponse.isAcknowledged() == false) { - wrappedListener.onFailure(new ElasticsearchTimeoutException("Unknown error occurred while updating cluster state")); - return; + ActionListener clusterStateUpdateListener = wrappedListener.delegateFailureAndWrap( + (delegate, acknowledgedResponse) -> { + if (acknowledgedResponse.isAcknowledged() == false) { + delegate.onFailure(new ElasticsearchTimeoutException("Unknown error occurred while updating cluster state")); + return; + } + delegate.onResponse(acknowledgedResponse); } - wrappedListener.onResponse(acknowledgedResponse); - }, wrappedListener::onFailure); + ); submitUnbatchedTask(featureName() + "-set-reset-mode", new AckedClusterStateUpdateTask(request, clusterStateUpdateListener) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java index 7cb36051b2cfc..bfbc32e11e93d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java @@ -85,7 +85,7 @@ public void performAction( return; } } - performDownsampleIndex(indexName, downsampleIndexName, ActionListener.wrap(listener::onResponse, listener::onFailure)); + performDownsampleIndex(indexName, downsampleIndexName, listener.delegateFailureAndWrap((l, r) -> l.onResponse(r))); } void performDownsampleIndex(String indexName, String downsampleIndexName, ActionListener listener) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java index 9289ac79efbf5..82e4280dcc4cc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java @@ -136,12 +136,12 @@ static void deleteSourceIndexAndTransferAliases( ); }); - client.admin().indices().aliases(aliasesRequest, ActionListener.wrap(response -> { + client.admin().indices().aliases(aliasesRequest, listener.delegateFailureAndWrap((l, response) -> { if (response.isAcknowledged() == false) { logger.warn("aliases swap from [{}] to [{}] response was not acknowledged", sourceIndexName, targetIndex); } - listener.onResponse(null); - }, listener::onFailure)); + l.onResponse(null); + })); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java index bcaf50ed930c4..5d5b04cf78815 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java @@ -45,9 +45,7 @@ public void performAction( UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indexMetadata.getIndex().getName()).masterNodeTimeout( TimeValue.MAX_VALUE ).settings(settings); - getClient().admin() - .indices() - .updateSettings(updateSettingsRequest, ActionListener.wrap(response -> listener.onResponse(null), listener::onFailure)); + getClient().admin().indices().updateSettings(updateSettingsRequest, listener.delegateFailureAndWrap((l, r) -> l.onResponse(null))); } public Settings getSettings() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java index eb4f4986fa193..09c7348cdc870 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java @@ -71,7 +71,7 @@ public static void createAnnotationsIndexIfNecessaryAndWaitForYellow( final ActionListener finalListener ) { - final ActionListener annotationsIndexCreatedListener = ActionListener.wrap(success -> { + final ActionListener annotationsIndexCreatedListener = finalListener.delegateFailureAndWrap((delegate, success) -> { final ClusterHealthRequest request = new ClusterHealthRequest(READ_ALIAS_NAME).waitForYellowStatus() .masterNodeTimeout(masterNodeTimeout); executeAsyncWithOrigin( @@ -79,9 +79,9 @@ public static void createAnnotationsIndexIfNecessaryAndWaitForYellow( ML_ORIGIN, ClusterHealthAction.INSTANCE, request, - ActionListener.wrap(r -> finalListener.onResponse(r.isTimedOut() == false), finalListener::onFailure) + delegate.delegateFailureAndWrap((l, r) -> 
l.onResponse(r.isTimedOut() == false)) ); - }, finalListener::onFailure); + }); createAnnotationsIndexIfNecessary(client, state, masterNodeTimeout, annotationsIndexCreatedListener); } @@ -97,17 +97,16 @@ public static void createAnnotationsIndexIfNecessary( final ActionListener finalListener ) { - final ActionListener checkMappingsListener = ActionListener.wrap( - success -> ElasticsearchMappings.addDocMappingIfMissing( + final ActionListener checkMappingsListener = finalListener.delegateFailureAndWrap( + (delegate, success) -> ElasticsearchMappings.addDocMappingIfMissing( WRITE_ALIAS_NAME, AnnotationIndex::annotationsMapping, client, state, masterNodeTimeout, - finalListener, + delegate, ANNOTATION_INDEX_MAPPINGS_VERSION - ), - finalListener::onFailure + ) ); final ActionListener createAliasListener = finalListener.delegateFailureAndWrap((finalDelegate, currentIndexName) -> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index 37d070d90be76..2b622a1798508 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -102,7 +102,7 @@ public static void createStateIndexAndAliasIfNecessaryAndWaitForYellow( TimeValue masterNodeTimeout, final ActionListener finalListener ) { - final ActionListener stateIndexAndAliasCreated = ActionListener.wrap(success -> { + final ActionListener stateIndexAndAliasCreated = finalListener.delegateFailureAndWrap((delegate, success) -> { final ClusterHealthRequest request = new ClusterHealthRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias()) .waitForYellowStatus() .masterNodeTimeout(masterNodeTimeout); @@ -111,9 +111,9 @@ public static void 
createStateIndexAndAliasIfNecessaryAndWaitForYellow( ML_ORIGIN, ClusterHealthAction.INSTANCE, request, - ActionListener.wrap(r -> finalListener.onResponse(r.isTimedOut() == false), finalListener::onFailure) + delegate.delegateFailureAndWrap((l, r) -> l.onResponse(r.isTimedOut() == false)) ); - }, finalListener::onFailure); + }); MlIndexAndAlias.createIndexAndAliasIfNecessary( client, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 4187762ca58c6..088275ddabb3e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -185,11 +185,11 @@ protected void doRun() throws Exception { ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, - ActionListener.wrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { if (response.isAcknowledged()) { - listener.onResponse(true); + delegate.onResponse(true); } else { - listener.onFailure( + delegate.onFailure( new ElasticsearchStatusException( "Attempt to put missing mapping in indices " + Arrays.toString(indicesThatRequireAnUpdate) @@ -198,7 +198,7 @@ protected void doRun() throws Exception { ) ); } - }, listener::onFailure) + }) ); } else { logger.trace("Mappings are up to date."); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index 78a3493e8ae6b..d691cb0eb4c53 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -113,13 +113,13 @@ public 
static void createIndexAndAliasIfNecessary( }); // If both the index and alias were successfully created then wait for the shards of the index that the alias points to be ready - ActionListener indexCreatedListener = ActionListener.wrap(created -> { + ActionListener indexCreatedListener = loggingListener.delegateFailureAndWrap((delegate, created) -> { if (created) { - waitForShardsReady(client, alias, masterNodeTimeout, loggingListener); + waitForShardsReady(client, alias, masterNodeTimeout, delegate); } else { - loggingListener.onResponse(false); + delegate.onResponse(false); } - }, loggingListener::onFailure); + }); String legacyIndexWithoutSuffix = indexPatternPrefix; String indexPattern = indexPatternPrefix + "*"; @@ -218,10 +218,7 @@ public static void createSystemIndexIfNecessary( client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest, - ActionListener.wrap( - r -> indexCreatedListener.onResponse(r.isAcknowledged()), - indexCreatedListener::onFailure - ), + indexCreatedListener.delegateFailureAndWrap((l, r) -> l.onResponse(r.isAcknowledged())), client.admin().indices()::create ); } @@ -235,10 +232,7 @@ private static void waitForShardsReady(Client client, String index, TimeValue ma client.threadPool().getThreadContext(), ML_ORIGIN, healthRequest, - ActionListener.wrap( - response -> listener.onResponse(response.isTimedOut() == false), - listener::onFailure - ), + listener.delegateFailureAndWrap((l, response) -> l.onResponse(response.isTimedOut() == false)), client.admin().cluster()::health ); } @@ -371,12 +365,12 @@ public static void installIndexTemplateIfRequired( return; } - ActionListener innerListener = ActionListener.wrap(response -> { + ActionListener innerListener = listener.delegateFailureAndWrap((l, response) -> { if (response.isAcknowledged() == false) { logger.warn("error adding template [{}], request was not acknowledged", templateRequest.name()); } - listener.onResponse(response.isAcknowledged()); - }, listener::onFailure); + 
l.onResponse(response.isAcknowledged()); + }); executeAsyncWithOrigin(client, ML_ORIGIN, PutComposableIndexTemplateAction.INSTANCE, templateRequest, innerListener); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java index 9802e06223332..c0f00cdada28f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java @@ -48,9 +48,9 @@ static ActionListener getArchitecturesSetFromNodesInfoRespons ExecutorService executor, ActionListener> architecturesListener ) { - return ActionListener.wrap(nodesInfoResponse -> { - executor.execute(() -> { architecturesListener.onResponse(getArchitecturesSetFromNodesInfoResponse(nodesInfoResponse)); }); - }, architecturesListener::onFailure); + return architecturesListener.delegateFailureAndWrap( + (l, nodesInfoResponse) -> executor.execute(() -> l.onResponse(getArchitecturesSetFromNodesInfoResponse(nodesInfoResponse))) + ); } static NodesInfoRequestBuilder getNodesInfoBuilderWithMlNodeArchitectureInfo(Client client) { @@ -77,10 +77,10 @@ public static void verifyMlNodesAndModelArchitectures( String modelID = configToReturn.getModelId(); String modelPlatformArchitecture = configToReturn.getPlatformArchitecture(); - ActionListener> architecturesListener = ActionListener.wrap((architectures) -> { + ActionListener> architecturesListener = successOrFailureListener.delegateFailureAndWrap((l, architectures) -> { verifyMlNodesAndModelArchitectures(architectures, modelPlatformArchitecture, modelID); - successOrFailureListener.onResponse(configToReturn); - }, successOrFailureListener::onFailure); + l.onResponse(configToReturn); + }); getMlNodesArchitecturesSet(architecturesListener, client, executor); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index cbe0ba99e57ce..db18752cb91b7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -127,6 +127,7 @@ public void setUpMocks() { ); listener = mock(ActionListener.class); + when(listener.delegateFailureAndWrap(any())).thenCallRealMethod(); createRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); aliasesRequestCaptor = ArgumentCaptor.forClass(IndicesAliasesRequest.class); @@ -171,6 +172,7 @@ public void testInstallIndexTemplateIfRequired_GivenLegacyTemplateExistsAndModer listener ); InOrder inOrder = inOrder(client, listener); + inOrder.verify(listener).delegateFailureAndWrap(any()); inOrder.verify(client).execute(same(PutComposableIndexTemplateAction.INSTANCE), any(), any()); inOrder.verify(listener).onResponse(true); } @@ -236,6 +238,7 @@ public void testInstallIndexTemplateIfRequired() { listener ); InOrder inOrder = inOrder(client, listener); + inOrder.verify(listener).delegateFailureAndWrap(any()); inOrder.verify(client).execute(same(PutComposableIndexTemplateAction.INSTANCE), any(), any()); inOrder.verify(listener).onResponse(true); } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java index 45384afbec59e..0b5eb7ada7655 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java @@ -33,9 +33,12 @@ public void check(Components 
components, ActionListener deprecation PageParams startPage = new PageParams(0, PageParams.DEFAULT_SIZE); List issues = new ArrayList<>(); - recursiveGetTransformsAndCollectDeprecations(components, issues, startPage, ActionListener.wrap(allIssues -> { - deprecationIssueListener.onResponse(new CheckResult(getName(), allIssues)); - }, deprecationIssueListener::onFailure)); + recursiveGetTransformsAndCollectDeprecations( + components, + issues, + startPage, + deprecationIssueListener.delegateFailureAndWrap((l, allIssues) -> l.onResponse(new CheckResult(getName(), allIssues))) + ); } @Override @@ -53,17 +56,17 @@ private static void recursiveGetTransformsAndCollectDeprecations( request.setPageParams(page); request.setAllowNoResources(true); - components.client().execute(GetTransformAction.INSTANCE, request, ActionListener.wrap(getTransformResponse -> { - for (TransformConfig config : getTransformResponse.getTransformConfigurations()) { - issues.addAll(config.checkForDeprecations(components.xContentRegistry())); - } - if (getTransformResponse.getTransformConfigurationCount() >= (page.getFrom() + page.getSize())) { - PageParams nextPage = new PageParams(page.getFrom() + page.getSize(), PageParams.DEFAULT_SIZE); - recursiveGetTransformsAndCollectDeprecations(components, issues, nextPage, listener); - } else { - listener.onResponse(issues); - } - - }, listener::onFailure)); + components.client() + .execute(GetTransformAction.INSTANCE, request, listener.delegateFailureAndWrap((delegate, getTransformResponse) -> { + for (TransformConfig config : getTransformResponse.getTransformConfigurations()) { + issues.addAll(config.checkForDeprecations(components.xContentRegistry())); + } + if (getTransformResponse.getTransformConfigurationCount() >= (page.getFrom() + page.getSize())) { + PageParams nextPage = new PageParams(page.getFrom() + page.getSize(), PageParams.DEFAULT_SIZE); + recursiveGetTransformsAndCollectDeprecations(components, issues, nextPage, delegate); + } else { + 
delegate.onResponse(issues); + } + })); } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index 9aff1c010cac7..3c16830c2ba97 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -105,7 +105,7 @@ protected final void masterOperation( ClientHelper.DEPRECATION_ORIGIN, NodesDeprecationCheckAction.INSTANCE, nodeDepReq, - ActionListener.wrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { if (response.hasFailures()) { List failedNodeIds = response.failures() .stream() @@ -127,7 +127,7 @@ protected final void masterOperation( components, new ThreadedActionListener<>( client.threadPool().generic(), - listener.map( + delegate.map( deprecationIssues -> DeprecationInfoAction.Response.from( state, indexNameExpressionResolver, @@ -141,8 +141,7 @@ protected final void masterOperation( ) ) ); - - }, listener::onFailure) + }) ); } @@ -158,14 +157,13 @@ static void pluginSettingIssues( } GroupedActionListener groupedActionListener = new GroupedActionListener<>( enabledCheckers.size(), - ActionListener.wrap( - checkResults -> listener.onResponse( + listener.delegateFailureAndWrap( + (l, checkResults) -> l.onResponse( checkResults.stream() .collect( Collectors.toMap(DeprecationChecker.CheckResult::getCheckerName, DeprecationChecker.CheckResult::getIssues) ) - ), - listener::onFailure + ) ) ); for (DeprecationChecker checker : checkers) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java index 
ff2759244a2f6..e606f6ac8ea9c 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java @@ -168,10 +168,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, try { ActionListener listener; if (request.isWaitForCompletion()) { - listener = ActionListener.wrap( - result -> actionListener.onResponse(new Response(result)), - actionListener::onFailure - ); + listener = actionListener.delegateFailureAndWrap((l, result) -> l.onResponse(new Response(result))); } else { listener = ActionListener.wrap( result -> LOGGER.debug("successfully executed policy [{}]", request.getName()), diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java index bf1327eb8efbe..5dec35149dc52 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java @@ -149,19 +149,21 @@ private void deleteIndicesAndPolicy(String[] indices, String name, ActionListene // as the setting 'action.destructive_requires_name' may be set to true DeleteIndexRequest deleteRequest = new DeleteIndexRequest().indices(indices).indicesOptions(LENIENT_OPTIONS); - new OriginSettingClient(client, ENRICH_ORIGIN).admin().indices().delete(deleteRequest, ActionListener.wrap((response) -> { - if (response.isAcknowledged() == false) { - listener.onFailure( - new ElasticsearchStatusException( - "Could not fetch indices to delete during policy delete of [{}]", - RestStatus.INTERNAL_SERVER_ERROR, - name - ) - ); - } else { - deletePolicy(name, listener); - } - }, 
listener::onFailure)); + new OriginSettingClient(client, ENRICH_ORIGIN).admin() + .indices() + .delete(deleteRequest, listener.delegateFailureAndWrap((delegate, response) -> { + if (response.isAcknowledged() == false) { + delegate.onFailure( + new ElasticsearchStatusException( + "Could not fetch indices to delete during policy delete of [{}]", + RestStatus.INTERNAL_SERVER_ERROR, + name + ) + ); + } else { + deletePolicy(name, delegate); + } + })); } private void deletePolicy(String name, ActionListener listener) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java index 2cfc1dc8fffa0..7433863fcbd5d 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java @@ -88,11 +88,11 @@ protected void masterOperation( privRequest.clusterPrivileges(Strings.EMPTY_ARRAY); privRequest.indexPrivileges(privileges); - ActionListener wrappedListener = ActionListener.wrap(r -> { + ActionListener wrappedListener = listener.delegateFailureAndWrap((delegate, r) -> { if (r.isCompleteMatch()) { - putPolicy(request, listener); + putPolicy(request, delegate); } else { - listener.onFailure( + delegate.onFailure( Exceptions.authorizationError( "unable to store policy because no indices match with the " + "specified index patterns {}", request.getPolicy().getIndices(), @@ -100,7 +100,7 @@ protected void masterOperation( ) ); } - }, listener::onFailure); + }); client.execute(HasPrivilegesAction.INSTANCE, privRequest, wrappedListener); } else { putPolicy(request, listener); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 326fdb0367e5c..a7d20414d4631 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -120,13 +120,12 @@ public void createConnectorSyncJob( clientWithOrigin.index( indexRequest, - ActionListener.wrap( - indexResponse -> listener.onResponse(new PostConnectorSyncJobAction.Response(indexResponse.getId())), - listener::onFailure + l.delegateFailureAndWrap( + (ll, indexResponse) -> ll.onResponse(new PostConnectorSyncJobAction.Response(indexResponse.getId())) ) ); } catch (IOException e) { - listener.onFailure(e); + l.onFailure(e); } })); } catch (Exception e) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java index 6f20bd4acb785..4a028a5558e87 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java @@ -66,10 +66,10 @@ protected void doExecute( SearchApplicationSearchRequest request, ActionListener listener ) { - systemIndexService.getSearchApplication(request.name(), ActionListener.wrap(searchApplication -> { + systemIndexService.getSearchApplication(request.name(), listener.delegateFailureAndWrap((delegate, searchApplication) -> { final Map renderedMetadata = templateService.renderTemplate(searchApplication, request.queryParams()); final 
SearchSourceBuilder sourceBuilder = templateService.renderQuery(searchApplication, renderedMetadata); - listener.onResponse(new RenderSearchApplicationQueryAction.Response(request.name(), sourceBuilder)); - }, listener::onFailure)); + delegate.onResponse(new RenderSearchApplicationQueryAction.Response(request.name(), sourceBuilder)); + })); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 7adcb0a1f9623..284c78c6e0121 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -167,7 +167,7 @@ private void preAnalyze(LogicalPlan parsed, BiFunction l.onResponse(action.apply(indexResolution, resolution)), listener::onFailure), + l.delegateFailureAndWrap((ll, indexResolution) -> ll.onResponse(action.apply(indexResolution, resolution))), matchFields ); }); diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java index b45d89f788b53..520efe269eb96 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java @@ -34,12 +34,12 @@ public TransportDeleteSecretAction(TransportService transportService, ActionFilt @Override protected void doExecute(Task task, DeleteSecretRequest request, ActionListener listener) { - client.prepareDelete(FLEET_SECRETS_INDEX_NAME, request.id()).execute(ActionListener.wrap(deleteResponse -> { + client.prepareDelete(FLEET_SECRETS_INDEX_NAME, request.id()).execute(listener.delegateFailureAndWrap((delegate, deleteResponse) -> { if 
(deleteResponse.getResult() == Result.NOT_FOUND) { - listener.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); + delegate.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); return; } - listener.onResponse(new DeleteSecretResponse(deleteResponse.getResult() == Result.DELETED)); - }, listener::onFailure)); + delegate.onResponse(new DeleteSecretResponse(deleteResponse.getResult() == Result.DELETED)); + })); } } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java index f1e6f5a4ed864..4c8311924ab4b 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java @@ -31,12 +31,12 @@ public TransportGetSecretAction(TransportService transportService, ActionFilters } protected void doExecute(Task task, GetSecretRequest request, ActionListener listener) { - client.prepareGet(FLEET_SECRETS_INDEX_NAME, request.id()).execute(ActionListener.wrap(getResponse -> { + client.prepareGet(FLEET_SECRETS_INDEX_NAME, request.id()).execute(listener.delegateFailureAndWrap((delegate, getResponse) -> { if (getResponse.isSourceEmpty()) { - listener.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); + delegate.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); return; } - listener.onResponse(new GetSecretResponse(getResponse.getId(), getResponse.getSource().get("value").toString())); - }, listener::onFailure)); + delegate.onResponse(new GetSecretResponse(getResponse.getId(), getResponse.getSource().get("value").toString())); + })); } } diff --git 
a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java index 2c3d6f7d98dc2..c87c4b58559ea 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java @@ -36,10 +36,7 @@ protected void doExecute(Task task, PostSecretRequest request, ActionListener listener.onResponse(new PostSecretResponse(indexResponse.getId())), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, indexResponse) -> l.onResponse(new PostSecretResponse(indexResponse.getId()))) ); } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java index d86268db44f64..fa0c510fde5c0 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java @@ -54,9 +54,9 @@ protected void doExecute( final ActionListener listener ) { final String entityId = request.getEntityId(); - index.findByEntityId(entityId, ActionListener.wrap(matchingDocuments -> { + index.findByEntityId(entityId, listener.delegateFailureAndWrap((delegate, matchingDocuments) -> { if (matchingDocuments.isEmpty()) { - listener.onResponse(new DeleteSamlServiceProviderResponse(null, entityId)); + delegate.onResponse(new DeleteSamlServiceProviderResponse(null, entityId)); } else if (matchingDocuments.size() == 1) { final SamlServiceProviderIndex.DocumentSupplier docInfo = 
Iterables.get(matchingDocuments, 0); final SamlServiceProviderDocument existingDoc = docInfo.getDocument(); @@ -66,9 +66,8 @@ protected void doExecute( index.deleteDocument( docInfo.version, request.getRefreshPolicy(), - ActionListener.wrap( - deleteResponse -> listener.onResponse(new DeleteSamlServiceProviderResponse(deleteResponse, entityId)), - listener::onFailure + delegate.delegateFailureAndWrap( + (l, deleteResponse) -> l.onResponse(new DeleteSamlServiceProviderResponse(deleteResponse, entityId)) ) ); } else { @@ -78,8 +77,8 @@ protected void doExecute( entityId, matchingDocuments.stream().map(d -> d.getDocument().docId).collect(Collectors.joining(",")) ); - listener.onFailure(new IllegalStateException("Multiple service providers exist with entity id [" + entityId + "]")); + delegate.onFailure(new IllegalStateException("Multiple service providers exist with entity id [" + entityId + "]")); } - }, listener::onFailure)); + })); } } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java index 09635557d0e0f..5dbba28a1e6fd 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java @@ -134,14 +134,14 @@ public void getActions(String application, ActionListener> listener) private void loadActions(String applicationName, ActionListener> listener) { final GetPrivilegesRequest request = new GetPrivilegesRequest(); request.application(applicationName); - this.client.execute(GetPrivilegesAction.INSTANCE, request, ActionListener.wrap(response -> { + this.client.execute(GetPrivilegesAction.INSTANCE, request, listener.delegateFailureAndWrap((delegate, response) -> { final Set fixedActions = 
Stream.of(response.privileges()) .map(p -> p.getActions()) .flatMap(Collection::stream) .filter(s -> s.indexOf('*') == -1) .collect(Collectors.toUnmodifiableSet()); cache.put(applicationName, fixedActions); - listener.onResponse(fixedActions); - }, listener::onFailure)); + delegate.onResponse(fixedActions); + })); } } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java index eeb5e91f29ced..c4ffe65feae5a 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java @@ -78,10 +78,10 @@ public UserPrivilegeResolver(Client client, SecurityContext securityContext, App * Requires that the active user is set in the {@link org.elasticsearch.xpack.core.security.SecurityContext}. 
*/ public void resolve(ServiceProviderPrivileges service, ActionListener listener) { - buildResourcePrivilege(service, ActionListener.wrap(resourcePrivilege -> { + buildResourcePrivilege(service, listener.delegateFailureAndWrap((delegate, resourcePrivilege) -> { final String username = securityContext.requireUser().principal(); if (resourcePrivilege == null) { - listener.onResponse(UserPrivileges.noAccess(username)); + delegate.onResponse(UserPrivileges.noAccess(username)); return; } HasPrivilegesRequest request = new HasPrivilegesRequest(); @@ -89,7 +89,7 @@ public void resolve(ServiceProviderPrivileges service, ActionListener { + client.execute(HasPrivilegesAction.INSTANCE, request, delegate.delegateFailureAndWrap((l, response) -> { logger.debug( "Checking access for user [{}] to application [{}] resource [{}]", username, @@ -98,9 +98,9 @@ public void resolve(ServiceProviderPrivileges service, ActionListener listener ) { - actionsResolver.getActions(service.getApplicationName(), ActionListener.wrap(actions -> { + actionsResolver.getActions(service.getApplicationName(), listener.delegateFailureAndWrap((delegate, actions) -> { if (actions == null || actions.isEmpty()) { logger.warn("No application-privilege actions defined for application [{}]", service.getApplicationName()); - listener.onResponse(null); + delegate.onResponse(null); } else { logger.debug("Using actions [{}] for application [{}]", actions, service.getApplicationName()); final RoleDescriptor.ApplicationResourcePrivileges.Builder builder = RoleDescriptor.ApplicationResourcePrivileges.builder(); builder.application(service.getApplicationName()); builder.resources(service.getResource()); builder.privileges(actions); - listener.onResponse(builder.build()); + delegate.onResponse(builder.build()); } - }, listener::onFailure)); + })); } } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java 
b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java index 0313040b7e8ae..9fc9f4a28d250 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java @@ -101,16 +101,20 @@ public void processQueryString(String queryString, ActionListener { - try { - validateAuthnRequest(authnRequest, sp, parsedQueryString, listener); - } catch (ElasticsearchSecurityException e) { - logger.debug("Could not validate AuthnRequest", e); - listener.onFailure(e); - } catch (Exception e) { - logAndRespond("Could not validate AuthnRequest", e, listener); - } - }, listener::onFailure)); + getSpFromAuthnRequest( + authnRequest.getIssuer(), + authnRequest.getAssertionConsumerServiceURL(), + listener.delegateFailureAndWrap((l, sp) -> { + try { + validateAuthnRequest(authnRequest, sp, parsedQueryString, l); + } catch (ElasticsearchSecurityException e) { + logger.debug("Could not validate AuthnRequest", e); + l.onFailure(e); + } catch (Exception e) { + logAndRespond("Could not validate AuthnRequest", e, l); + } + }) + ); } catch (ElasticsearchSecurityException e) { logger.debug("Could not process AuthnRequest", e); listener.onFailure(e); @@ -277,7 +281,7 @@ private void getSpFromAuthnRequest(Issuer issuer, String acs, ActionListener { + idp.resolveServiceProvider(issuerString, acs, false, listener.delegateFailureAndWrap((delegate, serviceProvider) -> { if (null == serviceProvider) { throw new ElasticsearchSecurityException( "Service Provider with Entity ID [{}] and ACS [{}] is not known to this Identity Provider", @@ -286,8 +290,8 @@ private void getSpFromAuthnRequest(Issuer issuer, String acs, ActionListener listener ) { - serviceProviderResolver.resolve(spEntityId, ActionListener.wrap(sp -> { + serviceProviderResolver.resolve(spEntityId, 
listener.delegateFailureAndWrap((delegate, sp) -> { if (sp == null) { logger.debug("No explicitly registered service provider exists for entityId [{}]", spEntityId); - resolveWildcardService(spEntityId, acs, listener); + resolveWildcardService(spEntityId, acs, delegate); } else if (allowDisabled == false && sp.isEnabled() == false) { logger.info("Service provider [{}][{}] is not enabled", spEntityId, sp.getName()); - listener.onResponse(null); + delegate.onResponse(null); } else { logger.debug("Service provider for [{}] is [{}]", spEntityId, sp); - listener.onResponse(sp); + delegate.onResponse(sp); } - }, listener::onFailure)); + })); } private void resolveWildcardService(String spEntityId, String acs, ActionListener listener) { diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java index dc4a9edbf22f4..13b2c461a6623 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java @@ -43,10 +43,10 @@ public SamlMetadataGenerator(SamlFactory samlFactory, SamlIdentityProvider idp) } public void generateMetadata(String spEntityId, String acs, ActionListener listener) { - idp.resolveServiceProvider(spEntityId, acs, true, ActionListener.wrap(sp -> { + idp.resolveServiceProvider(spEntityId, acs, true, listener.delegateFailureAndWrap((delegate, sp) -> { try { if (null == sp) { - listener.onFailure( + delegate.onFailure( new IllegalArgumentException( "Service provider with Entity ID [" + spEntityId + "] is not registered with this Identity Provider" ) @@ -56,12 +56,12 @@ public void generateMetadata(String spEntityId, String acs, ActionListener listener) { client.execute( StartTrainedModelDeploymentAction.INSTANCE, 
startRequest, - ActionListener.wrap(r -> listener.onResponse(Boolean.TRUE), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> l.onResponse(Boolean.TRUE)) ); } @@ -181,9 +181,11 @@ public void infer(Model model, List input, Map taskSetti input, TimeValue.timeValueSeconds(10) // TODO get timeout from request ); - client.execute(InferTrainedModelDeploymentAction.INSTANCE, request, ActionListener.wrap(inferenceResult -> { - listener.onResponse(SparseEmbeddingResults.of(inferenceResult.getResults())); - }, listener::onFailure)); + client.execute( + InferTrainedModelDeploymentAction.INSTANCE, + request, + listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(SparseEmbeddingResults.of(inferenceResult.getResults()))) + ); } private static ElserMlNodeTaskSettings taskSettingsFromMap(TaskType taskType, Map config) { diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java index b4b9c4fabac54..6d9a244c13dce 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java @@ -186,17 +186,16 @@ private void handleFilteringSearchResponse( client.prepareSearchScroll(searchResponse.getScrollId()) .setScroll(TimeValue.timeValueMinutes(1L)) .execute( - ActionListener.wrap( - searchResponse1 -> handleFilteringSearchResponse( + listener.delegateFailureAndWrap( + (delegate, searchResponse1) -> handleFilteringSearchResponse( searchResponse1, pipelineSources, explicitPipelineIds, wildcardPipelinePatterns, numberOfHitsSeenSoFar, clearScroll, - listener - ), - listener::onFailure + delegate + ) ) ); } diff --git 
a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java index 701a7bed33916..7125cc12f6cfd 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java @@ -37,11 +37,6 @@ protected void doExecute(Task task, PutPipelineRequest request, ActionListener

    listener.onResponse(new PutPipelineResponse(indexResponse.status())), - listener::onFailure - ) - ); + .execute(listener.delegateFailureAndWrap((l, indexResponse) -> l.onResponse(new PutPipelineResponse(indexResponse.status())))); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 8739c446c084d..d7a50b5f87f04 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -124,7 +124,7 @@ public void onResponse(DeleteResponse response) { } else { jobManager.updateProcessOnCalendarChanged( calendar.getJobIds(), - ActionListener.wrap(r -> listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> l.onResponse(AcknowledgedResponse.TRUE)) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java index 10679f447db15..efd65b5ac3282 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java @@ -107,9 +107,8 @@ private void forceDelete( ) { logger.debug("[{}] Force deleting data frame analytics job", request.getId()); - ActionListener stopListener = ActionListener.wrap( - stopResponse -> normalDelete(parentTaskClient, clusterService.state(), request, listener), - listener::onFailure + ActionListener stopListener = listener.delegateFailureAndWrap( + (l, stopResponse) -> normalDelete(parentTaskClient, 
clusterService.state(), request, l) ); stopJob(parentTaskClient, request, stopListener); @@ -168,10 +167,10 @@ private void normalDelete( // We clean up the memory tracker on delete because there is no stop; the task stops by itself memoryTracker.removeDataFrameAnalyticsJob(id); - configProvider.get(id, ActionListener.wrap(config -> { + configProvider.get(id, listener.delegateFailureAndWrap((l, config) -> { DataFrameAnalyticsDeleter deleter = new DataFrameAnalyticsDeleter(parentTaskClient, auditor); - deleter.deleteAllDocuments(config, request.timeout(), listener); - }, listener::onFailure)); + deleter.deleteAllDocuments(config, request.timeout(), l); + })); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index f51498815c40e..b28d37022e171 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -192,16 +192,15 @@ void deleteExpiredData( ) { if (haveAllPreviousDeletionsCompleted && mlDataRemoversIterator.hasNext()) { MlDataRemover remover = mlDataRemoversIterator.next(); - ActionListener nextListener = ActionListener.wrap( - booleanResponse -> deleteExpiredData( + ActionListener nextListener = listener.delegateFailureAndWrap( + (delegate, booleanResponse) -> deleteExpiredData( request, mlDataRemoversIterator, requestsPerSecond, - listener, + delegate, isTimedOutSupplier, booleanResponse - ), - listener::onFailure + ) ); // Removing expired ML data and artifacts requires multiple operations. 
// These are queued up and executed sequentially in the action listener, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 7842af8b12993..f3b0fcd669637 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -167,13 +167,13 @@ protected void masterOperation( } ); - ActionListener markAsDeletingListener = ActionListener.wrap(response -> { + ActionListener markAsDeletingListener = finalListener.delegateFailureAndWrap((delegate, response) -> { if (request.isForce()) { - forceDeleteJob(parentTaskClient, request, state, finalListener); + forceDeleteJob(parentTaskClient, request, state, delegate); } else { - normalDeleteJob(parentTaskClient, request, state, finalListener); + normalDeleteJob(parentTaskClient, request, state, delegate); } - }, finalListener::onFailure); + }); ActionListener datafeedDeleteListener = ActionListener.wrap(response -> { auditor.info(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING, taskId)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java index 4336489ce5d24..3865858f527b4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java @@ -146,13 +146,13 @@ private TypedChainTaskExecutor.ChainTask nextTask() { SearchRequest searchRequest = new SearchRequest(request.getIndices()).source(searchSourceBuilder); useSecondaryAuthIfAvailable( securityContext, - () -> 
client.execute(TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { + () -> client.execute(TransportSearchAction.TYPE, searchRequest, listener.delegateFailureAndWrap((l, searchResponse) -> { evaluation.process(searchResponse); if (evaluation.hasAllResults() == false) { add(nextTask()); } - listener.onResponse(null); - }, listener::onFailure)) + l.onResponse(null); + })) ); }; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java index ebbe06e69ba63..db6c962abbf55 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java @@ -68,7 +68,7 @@ protected void masterOperation( datafeedManager.getDatafeeds( request, parentTaskId, - ActionListener.wrap(datafeeds -> listener.onResponse(new GetDatafeedsAction.Response(datafeeds)), listener::onFailure) + listener.delegateFailureAndWrap((l, datafeeds) -> l.onResponse(new GetDatafeedsAction.Response(datafeeds))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java index 7fccc722d0c88..622d5ccab6940 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -76,9 +76,9 @@ public TransportUpdateFilterAction( @Override protected void doExecute(Task task, UpdateFilterAction.Request request, ActionListener listener) { - ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { - updateFilter(filterWithVersion, request, listener); - }, listener::onFailure); + 
ActionListener filterListener = listener.delegateFailureAndWrap( + (l, filterWithVersion) -> updateFilter(filterWithVersion, request, l) + ); getFilterWithVersion(request.getFilterId(), filterListener); } @@ -142,7 +142,7 @@ public void onResponse(DocWriteResponse indexResponse) { filter, request.getAddItems(), request.getRemoveItems(), - ActionListener.wrap(response -> listener.onResponse(new PutFilterAction.Response(filter)), listener::onFailure) + listener.delegateFailureAndWrap((l, response) -> l.onResponse(new PutFilterAction.Response(filter))) ); } From 79c874fbd1a7ae6124c88b3e5db6cf321357e3f9 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 5 Dec 2023 17:41:11 +0000 Subject: [PATCH 171/181] [ML] Disable concurrency for frequent items aggregation (#102999) The frequent items aggregation suffers from a significant increase in garbage collection activity when executed in parallel across slices. It does not run much faster when parallelised, so the simplest fix is to disable parallelisation. 
--- .../frequentitemsets/FrequentItemSetsAggregationBuilder.java | 5 +++++ .../FrequentItemSetsAggregationBuilderTests.java | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java index 684d61dbdedb3..b6bb013e86421 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.function.ToLongFunction; import static org.elasticsearch.common.Strings.format; @@ -264,4 +265,8 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_4_0; } + @Override + public boolean supportsParallelCollection(ToLongFunction fieldCardinalityResolver) { + return false; + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java index 9534ace3d3b9b..a2b7d0bfbe84c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java @@ -248,4 +248,9 @@ private static IncludeExclude randomIncludeExclude() { return new IncludeExclude(null, null, null, new TreeSet<>(Set.of(newBytesRef("exclude")))); } } + + public void testSupportsParallelCollection() { + FrequentItemSetsAggregationBuilder 
frequentItemSetsAggregationBuilder = randomFrequentItemsSetsAggregationBuilder(); + assertFalse(frequentItemSetsAggregationBuilder.supportsParallelCollection(null)); + } } From e8907da8792ed919d4418ef83c5a1b68ee6d3891 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 5 Dec 2023 19:28:22 +0100 Subject: [PATCH 172/181] [Connectors API] Enable as technical preview (#102994) --- docs/changelog/102994.yaml | 5 +++++ .../rest-api-spec/api/connector.check_in.json | 3 +-- .../rest-api-spec/api/connector.delete.json | 3 +-- .../resources/rest-api-spec/api/connector.get.json | 3 +-- .../rest-api-spec/api/connector.last_sync.json | 3 +-- .../resources/rest-api-spec/api/connector.list.json | 3 +-- .../resources/rest-api-spec/api/connector.post.json | 3 +-- .../resources/rest-api-spec/api/connector.put.json | 3 +-- .../api/connector.update_configuration.json | 3 +-- .../rest-api-spec/api/connector.update_error.json | 3 +-- .../api/connector.update_filtering.json | 3 +-- .../rest-api-spec/api/connector.update_name.json | 3 +-- .../rest-api-spec/api/connector.update_pipeline.json | 3 +-- .../api/connector.update_scheduling.json | 3 +-- .../rest-api-spec/api/connector_sync_job.cancel.json | 3 +-- .../api/connector_sync_job.check_in.json | 3 +-- .../rest-api-spec/api/connector_sync_job.delete.json | 3 +-- .../rest-api-spec/api/connector_sync_job.error.json | 3 +-- .../rest-api-spec/api/connector_sync_job.get.json | 3 +-- .../rest-api-spec/api/connector_sync_job.list.json | 3 +-- .../rest-api-spec/api/connector_sync_job.post.json | 3 +-- .../api/connector_sync_job.update_stats.json | 3 +-- .../application/connector/ConnectorAPIFeature.java | 12 +++++++++++- 23 files changed, 37 insertions(+), 43 deletions(-) create mode 100644 docs/changelog/102994.yaml diff --git a/docs/changelog/102994.yaml b/docs/changelog/102994.yaml new file mode 100644 index 0000000000000..c35baaefcb723 --- /dev/null +++ b/docs/changelog/102994.yaml @@ -0,0 +1,5 @@ +pr: 102994 +summary: Enable Connectors 
API as technical preview +area: Application +type: feature +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json index a9db92aa450e0..e95621d30fc16 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json @@ -5,8 +5,7 @@ "description": "Updates the last_seen timestamp in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json index 6cfc0ffcaf02b..dcb3a4f83c287 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json @@ -5,8 +5,7 @@ "description": "Deletes a connector." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json index d866920324852..bcddef8cb5cb9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json @@ -5,8 +5,7 @@ "description": "Returns the details about a connector." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json index 43b7b078eef58..7bc1504253070 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json @@ -5,8 +5,7 @@ "description": "Updates the stats of last sync in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json index a1e5ddcc5d686..852a5fbd85998 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json @@ -5,8 +5,7 @@ "description": "Lists all connectors." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json index 2dfaf150c455a..e76124bbecf7d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json @@ -5,8 +5,7 @@ "description": "Creates a connector." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json index 8511b870a2d12..0ab5c18671040 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json @@ -5,8 +5,7 @@ "description": "Creates or updates a connector." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json index 347418940b4c9..a82f9e0f29225 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json @@ -5,8 +5,7 @@ "description": "Updates the connector configuration." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json index 5d82a3729b501..51d5a1b25973b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json @@ -5,8 +5,7 @@ "description": "Updates the error field in the connector document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json index 6923dc88006e3..b9815fc111c06 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json @@ -5,8 +5,7 @@ "description": "Updates the filtering field in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json index e42d9b5766b0a..dabac5599932b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json @@ -5,8 +5,7 @@ "description": "Updates the name and/or description fields in the connector document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json index 2bd1acf7d28a6..25687e41a48de 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json @@ -5,8 +5,7 @@ "description": "Updates the pipeline field in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json index 8d45e588a75ef..8d934b8025145 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json @@ -5,8 +5,7 @@ "description": "Updates the scheduling field in the connector document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json index 883dd54bcb89b..dbea6935f8a87 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json @@ -5,8 +5,7 @@ "description": "Cancels a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json index 6c406a3a3d2c1..8193d92395255 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json @@ -5,8 +5,7 @@ "description": "Checks in a connector sync job (refreshes 'last_seen')." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json index de8ffff861a98..ba9b5095a5275 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json @@ -5,8 +5,7 @@ "description": "Deletes a connector sync job." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json index 5db061eda6e48..394e6e2fcb38f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json @@ -5,8 +5,7 @@ "description": "Sets an error for a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json index 6eb461ad62128..d0f14b0001bd8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json @@ -5,8 +5,7 @@ "description": "Returns the details about a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json index d09a720f748ec..86995477f060a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json @@ -5,8 +5,7 @@ "description": "Lists all connector sync jobs." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json index 563d0022d90d3..1db58c31dfa38 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json @@ -5,8 +5,7 @@ "description": "Creates a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json index 52f5a55cc8458..825e5d8939e2d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json @@ -5,8 +5,7 @@ "description": "Updates the stats fields in the connector sync job document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java index 40dcf02a2bf19..a3053e90335ad 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java @@ -16,7 +16,17 @@ public class ConnectorAPIFeature { private static final FeatureFlag CONNECTOR_API_FEATURE_FLAG = new FeatureFlag("connector_api"); + /** + * Enables the Connectors API feature by default for the tech preview phase. + * As documented, the Connectors API is currently a tech preview feature, + * and customers should be aware that no SLAs or support are guaranteed during + * its pre-General Availability (GA) stage. + * + * Instead of removing the feature flag from the code, we enable it by default. + * This approach allows for the complete deactivation of the feature during the QA phase, + * should any critical bugs be discovered, with a single, trackable code change. + */ public static boolean isEnabled() { - return CONNECTOR_API_FEATURE_FLAG.isEnabled(); + return true; } } From 931c0fce8e5f2ad66a3645c469872c8638d02e16 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 5 Dec 2023 19:31:59 +0100 Subject: [PATCH 173/181] Mute ReverseNestedAggregatorTests (#103005) similar to https://github.com/elastic/elasticsearch/pull/102998, let's mute it until it gets fixed. 
relates https://github.com/elastic/elasticsearch/issues/102974 --- .../bucket/nested/ReverseNestedAggregatorTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 9c908f13d90bc..f6be5c2171193 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; @@ -43,6 +44,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.reverseNested; import static org.hamcrest.Matchers.equalTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public class ReverseNestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; From c0edd65175db46711c8a40ae1f80eec2e8d31fb8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 5 Dec 2023 19:04:23 +0000 Subject: [PATCH 174/181] AwaitsFix for #103012 --- .../repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java index 
3b154cf4953be..af1af7dc53d19 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java @@ -63,6 +63,7 @@ protected Settings repositorySettings() { } @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103012") public void testRepositoryAnalysis() throws Exception { super.testRepositoryAnalysis(); } From 03dd28dced1ab53c3620168901d9d2b736b70835 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 5 Dec 2023 11:15:37 -0800 Subject: [PATCH 175/181] Capture JVM compiler replay data in build results archive (#103007) We want to capture compiler replay data so we can better troubleshoot the root cause of https://github.com/elastic/elasticsearch/issues/103004. --- .../gradle/internal/ElasticsearchBuildCompletePlugin.java | 1 + 1 file changed, 1 insertion(+) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index 4902168d9b4ff..bad3ebb11a0dd 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -86,6 +86,7 @@ private List resolveProjectLogs(File projectDir) { projectDirFiles.include("**/build/testclusters/**"); projectDirFiles.include("**/build/testrun/*/temp/**"); projectDirFiles.include("**/build/**/hs_err_pid*.log"); + projectDirFiles.include("**/build/**/replay_pid*.log"); projectDirFiles.exclude("**/build/testclusters/**/data/**"); projectDirFiles.exclude("**/build/testclusters/**/distro/**"); 
projectDirFiles.exclude("**/build/testclusters/**/repo/**"); From 6b6fd7b95766be6520d5907b3ad2ab729122a9a8 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 5 Dec 2023 15:43:48 -0500 Subject: [PATCH 176/181] Adding new DynamicMapperBenchmark to exercise dynamic mapping parsing (#103015) --- .../index/mapper/DynamicMapperBenchmark.java | 205 ++++++++++++++++++ 1 file changed, 205 insertions(+) create mode 100644 benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java new file mode 100644 index 0000000000000..eae233e276038 --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java @@ -0,0 +1,205 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.benchmark.index.mapper; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.Mapping; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.xcontent.XContentType; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.util.Arrays; +import java.util.List; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.DoubleStream; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +@Fork(value = 3) +@Warmup(iterations = 3) +@Measurement(iterations = 5) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +public class DynamicMapperBenchmark { + + @Param({ "1600172297" }) + private long seed; + + private Random random; + private SourceToParse[] sources; + + @Setup + public void setUp() { + this.random = new Random(seed); + this.sources = generateRandomDocuments(500); + } + + private SourceToParse[] generateRandomDocuments(int count) { + var docs = new SourceToParse[count]; + for (int i = 0; i < count; i++) { + docs[i] = 
generateRandomDocument(); + } + return docs; + } + + private SourceToParse generateRandomDocument() { + int textFields = 50; + int intFields = 50; + int floatFields = 50; + int objFields = 10; + int objFieldDepth = 10; + int fieldValueCountMax = 25; + StringBuilder builder = new StringBuilder(); + builder.append("{"); + for (int i = 0; i < textFields; i++) { + if (random.nextBoolean()) { + StringBuilder fieldValueBuilder = generateTextField(fieldValueCountMax); + builder.append("\"text_field_").append(i).append("\":").append(fieldValueBuilder).append(","); + } + } + for (int i = 0; i < intFields; i++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"int_field_") + .append(i) + .append("\":") + .append(Arrays.toString(IntStream.generate(() -> random.nextInt()).limit(fieldValueCount).toArray())) + .append(","); + } + } + for (int i = 0; i < floatFields; i++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"float_field_") + .append(i) + .append("\":") + .append(Arrays.toString(DoubleStream.generate(() -> random.nextFloat()).limit(fieldValueCount).toArray())) + .append(","); + } + } + for (int i = 0; i < objFields; i++) { + final int idx = i; + if (random.nextBoolean()) { + continue; + } + String objFieldPrefix = Stream.generate(() -> "obj_field_" + idx).limit(objFieldDepth).collect(Collectors.joining(".")); + for (int j = 0; j < textFields; j++) { + if (random.nextBoolean()) { + StringBuilder fieldValueBuilder = generateTextField(fieldValueCountMax); + builder.append("\"") + .append(objFieldPrefix) + .append(".text_field_") + .append(j) + .append("\":") + .append(fieldValueBuilder) + .append(","); + } + } + for (int j = 0; j < intFields; j++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"") + .append(objFieldPrefix) + .append(".int_field_") + .append(j) + .append("\":") + 
.append(Arrays.toString(IntStream.generate(() -> random.nextInt()).limit(fieldValueCount).toArray())) + .append(","); + } + } + for (int j = 0; j < floatFields; j++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"") + .append(objFieldPrefix) + .append(".float_field_") + .append(j) + .append("\":") + .append(Arrays.toString(DoubleStream.generate(() -> random.nextFloat()).limit(fieldValueCount).toArray())) + .append(","); + } + } + } + if (builder.charAt(builder.length() - 1) == ',') { + builder.deleteCharAt(builder.length() - 1); + } + builder.append("}"); + return new SourceToParse(UUIDs.randomBase64UUID(), new BytesArray(builder.toString()), XContentType.JSON); + } + + private StringBuilder generateTextField(int fieldValueCountMax) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + StringBuilder fieldValueBuilder = new StringBuilder(); + fieldValueBuilder.append("["); + for (int j = 0; j < fieldValueCount - 1; j++) { + fieldValueBuilder.append("\"").append(randomString(6)).append("\"").append(","); + } + return fieldValueBuilder.append("\"").append(randomString(6)).append("\"").append("]"); + } + + private String randomString(int maxLength) { + var length = random.nextInt(maxLength); + var builder = new StringBuilder(length); + for (int i = 0; i < length; i++) { + builder.append((byte) (32 + random.nextInt(94))); + } + return builder.toString(); + } + + @SafeVarargs + @SuppressWarnings("varargs") + private T randomFrom(T... 
items) { + return items[random.nextInt(items.length)]; + } + + @Benchmark + public List benchmarkDynamicallyCreatedFields() throws Exception { + MapperService mapperService = MapperServiceFactory.create("{}"); + for (int i = 0; i < 25; i++) { + DocumentMapper documentMapper = mapperService.documentMapper(); + Mapping mapping = null; + if (documentMapper == null) { + documentMapper = DocumentMapper.createEmpty(mapperService); + mapping = documentMapper.mapping(); + } + ParsedDocument doc = documentMapper.parse(randomFrom(sources)); + if (mapping != null) { + doc.addDynamicMappingsUpdate(mapping); + } + if (doc.dynamicMappingsUpdate() != null) { + mapperService.merge( + "_doc", + new CompressedXContent(XContentHelper.toXContent(doc.dynamicMappingsUpdate(), XContentType.JSON, false)), + MapperService.MergeReason.MAPPING_UPDATE + ); + } + } + return mapperService.documentMapper().parse(randomFrom(sources)).docs(); + } +} From c3bc39a95ab8f84b79b64bff5e2985ff99decf4e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 5 Dec 2023 15:49:35 -0500 Subject: [PATCH 177/181] Log more information in debug when synonyms fail updates (#102946) Related to: https://github.com/elastic/elasticsearch/issues/102261 In test failures, we are not receiving any information around the bulk indexing cause stacktrace, just the message. This adds debug logging and grabs the first stacktrace over all indices. Additionally, the logger groups by the failure message in an effort to find unique failures over all the indices. 
--- .../test/synonyms/40_synonyms_sets_get.yml | 13 +++++++++ .../SynonymsManagementAPIService.java | 29 +++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml index d13c8e9ffcc65..f2d29bf863a8f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml @@ -23,8 +23,21 @@ setup: body: synonyms_set: - synonyms: "pc, computer" + # set logging to debug for issue: https://github.com/elastic/elasticsearch/issues/102261 + - do: + cluster.put_settings: + body: + persistent: + logger.org.elasticsearch.synonyms: DEBUG --- +teardown: + - do: + cluster.put_settings: + body: + persistent: + logger.org.elasticsearch.synonyms: null +--- "List synonyms set": - do: synonyms.get_synonyms_sets: { } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index 848f103aaaf56..20aac833190a7 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -8,6 +8,8 @@ package org.elasticsearch.synonyms; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; @@ -62,6 +64,8 @@ * Manages synonyms performing operations on the system index */ public class SynonymsManagementAPIService { + + private static final Logger logger = LogManager.getLogger(SynonymsManagementAPIService.class); private static final String SYNONYMS_INDEX_NAME_PATTERN 
= ".synonyms-*"; private static final int SYNONYMS_INDEX_FORMAT = 2; private static final String SYNONYMS_INDEX_CONCRETE_NAME = ".synonyms-" + SYNONYMS_INDEX_FORMAT; @@ -234,11 +238,30 @@ private static SynonymRule sourceMapToSynonymRule(Map docSourceA return new SynonymRule((String) docSourceAsMap.get(SYNONYM_RULE_ID_FIELD), (String) docSourceAsMap.get(SYNONYMS_FIELD)); } + private static void logUniqueFailureMessagesWithIndices(List bulkFailures) { + // check if logger is at least debug + if (logger.isDebugEnabled() == false) { + return; + } + Map> uniqueFailureMessages = bulkFailures.stream() + .collect(Collectors.groupingBy(BulkItemResponse.Failure::getMessage)); + // log each unique failure with their associated indices and the first stacktrace + uniqueFailureMessages.forEach((failureMessage, failures) -> { + logger.debug( + "Error updating synonyms: [{}], indices: [{}], stacktrace: [{}]", + failureMessage, + failures.stream().map(BulkItemResponse.Failure::getIndex).collect(Collectors.joining(",")), + ExceptionsHelper.formatStackTrace(failures.get(0).getCause().getStackTrace()) + ); + }); + } + public void putSynonymsSet(String synonymSetId, SynonymRule[] synonymsSet, ActionListener listener) { deleteSynonymsSetObjects(synonymSetId, listener.delegateFailure((deleteByQueryResponseListener, bulkDeleteResponse) -> { boolean created = bulkDeleteResponse.getDeleted() == 0; final List bulkDeleteFailures = bulkDeleteResponse.getBulkFailures(); if (bulkDeleteFailures.isEmpty() == false) { + logUniqueFailureMessagesWithIndices(bulkDeleteFailures); listener.onFailure( new ElasticsearchException( "Error updating synonyms: " @@ -264,6 +287,12 @@ public void putSynonymsSet(String synonymSetId, SynonymRule[] synonymsSet, Actio bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute(deleteByQueryResponseListener.delegateFailure((bulkInsertResponseListener, bulkInsertResponse) -> { if (bulkInsertResponse.hasFailures()) { + 
logUniqueFailureMessagesWithIndices( + Arrays.stream(bulkInsertResponse.getItems()) + .filter(BulkItemResponse::isFailed) + .map(BulkItemResponse::getFailure) + .collect(Collectors.toList()) + ); bulkInsertResponseListener.onFailure( new ElasticsearchException("Error updating synonyms: " + bulkInsertResponse.buildFailureMessage()) ); From 714611e1efaba13beba25999613b957bb5d83886 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 5 Dec 2023 14:52:15 -0600 Subject: [PATCH 178/181] Clarify javadoc for AckedClusterStateUpdateTask (#103001) Javadoc only change to clarify "acknowledged" --- .../elasticsearch/cluster/AckedClusterStateUpdateTask.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java b/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java index 60ee105ade461..b81fde1156bd1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java @@ -15,8 +15,9 @@ import org.elasticsearch.core.TimeValue; /** - * An extension interface to {@link ClusterStateUpdateTask} that allows to be notified when - * all the nodes have acknowledged a cluster state update request + * An extension interface to {@link ClusterStateUpdateTask} that allows the caller to be notified after the master has + * computed, published, accepted, committed, and applied the cluster state update AND only after the rest of the nodes + * (or a specified subset) have also accepted and applied the cluster state update. 
*/ public abstract class AckedClusterStateUpdateTask extends ClusterStateUpdateTask implements ClusterStateAckListener { From 4934d083647ea8bd7a59c9fde907c5f7a4c26ba6 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 5 Dec 2023 17:01:22 -0500 Subject: [PATCH 179/181] Deprecate the unused elasticsearch_version field of enrich policy json (#103013) --- docs/changelog/103013.yaml | 5 ++ .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/core/enrich/EnrichPolicy.java | 81 ++++++++++--------- .../enrich/action/PutEnrichPolicyAction.java | 6 -- .../xpack/enrich/EnrichStore.java | 20 +---- .../xpack/enrich/EnrichPolicyRunnerTests.java | 2 +- .../rest-api-spec/test/enrich/10_basic.yml | 32 +++++++- 7 files changed, 81 insertions(+), 66 deletions(-) create mode 100644 docs/changelog/103013.yaml diff --git a/docs/changelog/103013.yaml b/docs/changelog/103013.yaml new file mode 100644 index 0000000000000..bb8eb99088856 --- /dev/null +++ b/docs/changelog/103013.yaml @@ -0,0 +1,5 @@ +pr: 103013 +summary: Deprecate the unused `elasticsearch_version` field of enrich policy json +area: Ingest Node +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 0b899a863e197..5c19edc14075b 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -190,6 +190,7 @@ static TransportVersion def(int id) { public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); public static final TransportVersion INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED = def(8_559_00_0); + public static final TransportVersion ENRICH_ELASTICSEARCH_VERSION_REMOVED = def(8_560_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java index 600e065900d30..74b274f2fd387 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.enrich; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -13,9 +14,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -33,6 +35,11 @@ */ public final class EnrichPolicy implements Writeable, ToXContentFragment { + private static final String ELASTICEARCH_VERSION_DEPRECATION_MESSAGE = + "the [elasticsearch_version] field of an enrich policy has no effect and will be removed in Elasticsearch 9.0"; + + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(EnrichPolicy.class); + public static final String ENRICH_INDEX_NAME_BASE = ".enrich-"; public static final String ENRICH_INDEX_PATTERN = ENRICH_INDEX_NAME_BASE + "*"; @@ -57,7 +64,7 @@ public final class EnrichPolicy implements Writeable, ToXContentFragment { (List) args[1], (String) args[2], (List) args[3], - (Version) args[4] 
+ (String) args[4] ) ); @@ -74,12 +81,7 @@ private static void declareCommonConstructorParsingOptions(ConstructingObjec parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES); parser.declareString(ConstructingObjectParser.constructorArg(), MATCH_FIELD); parser.declareStringArray(ConstructingObjectParser.constructorArg(), ENRICH_FIELDS); - parser.declareField( - ConstructingObjectParser.optionalConstructorArg(), - ((p, c) -> Version.fromString(p.text())), - ELASTICSEARCH_VERSION, - ValueType.STRING - ); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), ELASTICSEARCH_VERSION); } public static EnrichPolicy fromXContent(XContentParser parser) throws IOException { @@ -108,37 +110,45 @@ public static EnrichPolicy fromXContent(XContentParser parser) throws IOExceptio private final List indices; private final String matchField; private final List enrichFields; - private final Version elasticsearchVersion; public EnrichPolicy(StreamInput in) throws IOException { - this( - in.readString(), - in.readOptionalWriteable(QuerySource::new), - in.readStringCollectionAsList(), - in.readString(), - in.readStringCollectionAsList(), - Version.readVersion(in) - ); + this.type = in.readString(); + this.query = in.readOptionalWriteable(QuerySource::new); + this.indices = in.readStringCollectionAsList(); + this.matchField = in.readString(); + this.enrichFields = in.readStringCollectionAsList(); + if (in.getTransportVersion().before(TransportVersions.ENRICH_ELASTICSEARCH_VERSION_REMOVED)) { + // consume the passed-in meaningless version that old elasticsearch clusters will send + Version.readVersion(in); + } } public EnrichPolicy(String type, QuerySource query, List indices, String matchField, List enrichFields) { - this(type, query, indices, matchField, enrichFields, Version.CURRENT); + this.type = type; + this.query = query; + this.indices = indices; + this.matchField = matchField; + this.enrichFields = enrichFields; } - public EnrichPolicy( 
+ private EnrichPolicy( String type, QuerySource query, List indices, String matchField, List enrichFields, - Version elasticsearchVersion + String elasticsearchVersion ) { - this.type = type; - this.query = query; - this.indices = indices; - this.matchField = matchField; - this.enrichFields = enrichFields; - this.elasticsearchVersion = elasticsearchVersion != null ? elasticsearchVersion : Version.CURRENT; + this(type, query, indices, matchField, enrichFields); + // for backwards compatibility reasons, it is possible to pass in an elasticsearchVersion -- that version is + // completely ignored and does nothing. we'll fix that in a future version, so send a deprecation warning. + if (elasticsearchVersion != null) { + deprecationLogger.warn( + DeprecationCategory.OTHER, + "enrich_policy_with_elasticsearch_version", + ELASTICEARCH_VERSION_DEPRECATION_MESSAGE + ); + } } public String getType() { @@ -161,10 +171,6 @@ public List getEnrichFields() { return enrichFields; } - public Version getElasticsearchVersion() { - return elasticsearchVersion; - } - public static String getBaseName(String policyName) { return ENRICH_INDEX_NAME_BASE + policyName; } @@ -202,7 +208,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(indices); out.writeString(matchField); out.writeStringCollection(enrichFields); - Version.writeVersion(elasticsearchVersion, out); + if (out.getTransportVersion().before(TransportVersions.ENRICH_ELASTICSEARCH_VERSION_REMOVED)) { + // emit the current version of elasticsearch for bwc serialization reasons + Version.writeVersion(Version.CURRENT, out); + } } @Override @@ -222,9 +231,6 @@ private void toInnerXContent(XContentBuilder builder, Params params) throws IOEx builder.array(INDICES.getPreferredName(), indices.toArray(new String[0])); builder.field(MATCH_FIELD.getPreferredName(), matchField); builder.array(ENRICH_FIELDS.getPreferredName(), enrichFields.toArray(new String[0])); - if 
(params.paramAsBoolean("include_version", false) && elasticsearchVersion != null) { - builder.field(ELASTICSEARCH_VERSION.getPreferredName(), elasticsearchVersion.toString()); - } } @Override @@ -236,13 +242,12 @@ public boolean equals(Object o) { && Objects.equals(query, policy.query) && indices.equals(policy.indices) && matchField.equals(policy.matchField) - && enrichFields.equals(policy.enrichFields) - && elasticsearchVersion.equals(policy.elasticsearchVersion); + && enrichFields.equals(policy.enrichFields); } @Override public int hashCode() { - return Objects.hash(type, query, indices, matchField, enrichFields, elasticsearchVersion); + return Objects.hash(type, query, indices, matchField, enrichFields); } public String toString() { @@ -310,7 +315,7 @@ public static class NamedPolicy implements Writeable, ToXContentFragment { (List) args[2], (String) args[3], (List) args[4], - (Version) args[5] + (String) args[5] ) ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java index e42a5a05022d2..ec1b04e453bb5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.enrich.action; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -39,11 +38,6 @@ public static class Request extends MasterNodeRequest { - for (String indexExpression : finalPolicy.getIndices()) { + for (String indexExpression : policy.getIndices()) { // indices field in policy can contain wildcards, aliases etc. 
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames( current, @@ -110,12 +96,12 @@ public static void putPolicy( throw new IllegalArgumentException("source index [" + concreteIndex + "] has no mapping"); } Map mappingSource = mapping.getSourceAsMap(); - EnrichPolicyRunner.validateMappings(name, finalPolicy, concreteIndex, mappingSource); + EnrichPolicyRunner.validateMappings(name, policy, concreteIndex, mappingSource); } } final Map policies = getPolicies(current); - EnrichPolicy existing = policies.putIfAbsent(name, finalPolicy); + EnrichPolicy existing = policies.putIfAbsent(name, policy); if (existing != null) { throw new ResourceAlreadyExistsException("policy [{}] already exists", name); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index f95c4959be771..aac9f5e74cf0e 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -313,7 +313,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { } ); List enrichFields = List.of("zipcode"); - EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.RANGE_TYPE, null, List.of(sourceIndex), "range", enrichFields, null); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.RANGE_TYPE, null, List.of(sourceIndex), "range", enrichFields); String policyName = "test1"; final long createTime = randomNonNegativeLong(); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml index e580b188c9ba4..afe4bcabee3d9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml @@ -1,6 +1,4 @@ ---- -"Test enrich crud apis": - +setup: - do: indices.create: index: bar @@ -13,8 +11,9 @@ type: keyword b: type: keyword - - is_true: acknowledged +--- +"Test enrich crud apis": - do: enrich.put_policy: name: policy-crud @@ -60,3 +59,28 @@ enrich.delete_policy: name: policy-crud - is_true: acknowledged + +--- +"Test using the deprecated elasticsearch_version field results in a warning": + - skip: + version: " - 8.11.99" + reason: "elasticsearch_version field deprecated in 8.12.0, to be removed in 9.0" + features: warnings + + - do: + warnings: + - "the [elasticsearch_version] field of an enrich policy has no effect and will be removed in Elasticsearch 9.0" + enrich.put_policy: + name: policy-crud-warning + body: + match: + indices: ["bar*"] + match_field: baz + enrich_fields: ["a", "b"] + elasticsearch_version: "any string here is acceptable" + - is_true: acknowledged + + - do: + enrich.delete_policy: + name: policy-crud-warning + - is_true: acknowledged From 5134fab2b550ba9f00dd5b3245288a0227b23a49 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 5 Dec 2023 14:36:51 -0800 Subject: [PATCH 180/181] Mute ESQL test --- .../plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 02e9db6ededf1..e0167ce451e80 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -257,7 +257,8 @@ eth1 |alpha |::1 |::1 eth0 |beta |127.0.0.1 |::1 ; -pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] +// AwaitsFix: https://github.com/elastic/elasticsearch/issues/103028 +pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue 
warning introduced in 8.12 only]-Ignore from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true warning:Line 1:20: evaluation of [ip1 > to_ip(\"127.0.0.1\")] failed, treating result as null. Only first 20 failures recorded. From d795d82253c274fd8989a22146ae4601c6a72104 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 5 Dec 2023 15:20:28 -0800 Subject: [PATCH 181/181] Use extra properties for configuring test output dump on error (#103023) --- .../gradle/internal/ElasticsearchTestBasePlugin.java | 2 +- .../gradle/internal/test/ErrorReportingTestListener.java | 3 ++- .../gradle/internal/test/rest/RestTestBasePlugin.java | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index 50f4000612981..31b62c4ac700f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -57,7 +57,7 @@ public void apply(Project project) { File testOutputDir = new File(test.getReports().getJunitXml().getOutputLocation().getAsFile().get(), "output"); ErrorReportingTestListener listener = new ErrorReportingTestListener(test, testOutputDir); - test.getInputs().property(DUMP_OUTPUT_ON_FAILURE_PROP_NAME, true); + test.getExtensions().getExtraProperties().set(DUMP_OUTPUT_ON_FAILURE_PROP_NAME, true); test.getExtensions().add("errorReportingTestListener", listener); test.addTestOutputListener(listener); test.addTestListener(listener); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java index 
15cae4868034e..e3149d63e5c5b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java @@ -258,7 +258,8 @@ public void close() throws IOException { } private boolean isDumpOutputEnabled() { - return (Boolean) testTask.getInputs() + return (Boolean) testTask.getExtensions() + .getExtraProperties() .getProperties() .getOrDefault(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, true); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index a7e72b55f9117..b51842bbdcbf7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -168,7 +168,7 @@ public void apply(Project project) { nonInputSystemProperties.systemProperty(TESTS_MAX_PARALLEL_FORKS_SYSPROP, () -> String.valueOf(task.getMaxParallelForks())); // Disable test failure reporting since this stuff is now captured in build scans - task.getInputs().property(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, false); + task.getExtensions().getExtraProperties().set(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, false); // Disable the security manager and syscall filter since the test framework needs to fork processes task.systemProperty("tests.security.manager", "false");