From 2d45e47bb5abf66ad80a06a7abf60de26ce5f147 Mon Sep 17 00:00:00 2001 From: Huaixinww <141887897+Huaixinww@users.noreply.github.com> Date: Thu, 27 Jun 2024 15:12:28 +0800 Subject: [PATCH 001/216] [ML] Add InferenceAction request query validation (#110147) --- .../inference/action/InferenceAction.java | 14 +++ .../action/InferenceActionRequestTests.java | 89 +++++++++++++++++++ 2 files changed, 103 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index cfd4da0d59e31..16d0b940d40e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -38,6 +38,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.core.Strings.format; + public class InferenceAction extends ActionType { public static final InferenceAction INSTANCE = new InferenceAction(); @@ -173,6 +175,18 @@ public ActionRequestValidationException validate() { e.addValidationError("input array is empty"); return e; } + if (taskType.equals(TaskType.RERANK)) { + if (query == null) { + var e = new ActionRequestValidationException(); + e.addValidationError(format("Field [query] cannot be null for task type [%s]", TaskType.RERANK)); + return e; + } + if (query.isEmpty()) { + var e = new ActionRequestValidationException(); + e.addValidationError(format("Field [query] cannot be empty for task type [%s]", TaskType.RERANK)); + return e; + } + } return null; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index 476167c5db0fb..fa7044ffd8c8b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -71,6 +72,94 @@ public void testParsing() throws IOException { } } + public void testValidation_TextEmbedding() { + InferenceAction.Request request = new InferenceAction.Request( + TaskType.TEXT_EMBEDDING, + "model", + null, + List.of("input"), + null, + null, + null + ); + ActionRequestValidationException e = request.validate(); + assertNull(e); + } + + public void testValidation_Rerank() { + InferenceAction.Request request = new InferenceAction.Request( + TaskType.RERANK, + "model", + "query", + List.of("input"), + null, + null, + null + ); + ActionRequestValidationException e = request.validate(); + assertNull(e); + } + + public void testValidation_TextEmbedding_Null() { + InferenceAction.Request inputNullRequest = new InferenceAction.Request( + TaskType.TEXT_EMBEDDING, + "model", + null, + null, + null, + null, + null + ); + ActionRequestValidationException inputNullError = inputNullRequest.validate(); + assertNotNull(inputNullError); + assertThat(inputNullError.getMessage(), is("Validation Failed: 1: missing 
input;")); + } + + public void testValidation_TextEmbedding_Empty() { + InferenceAction.Request inputEmptyRequest = new InferenceAction.Request( + TaskType.TEXT_EMBEDDING, + "model", + null, + List.of(), + null, + null, + null + ); + ActionRequestValidationException inputEmptyError = inputEmptyRequest.validate(); + assertNotNull(inputEmptyError); + assertThat(inputEmptyError.getMessage(), is("Validation Failed: 1: input array is empty;")); + } + + public void testValidation_Rerank_Null() { + InferenceAction.Request queryNullRequest = new InferenceAction.Request( + TaskType.RERANK, + "model", + null, + List.of("input"), + null, + null, + null + ); + ActionRequestValidationException queryNullError = queryNullRequest.validate(); + assertNotNull(queryNullError); + assertThat(queryNullError.getMessage(), is("Validation Failed: 1: Field [query] cannot be null for task type [rerank];")); + } + + public void testValidation_Rerank_Empty() { + InferenceAction.Request queryEmptyRequest = new InferenceAction.Request( + TaskType.RERANK, + "model", + "", + List.of("input"), + null, + null, + null + ); + ActionRequestValidationException queryEmptyError = queryEmptyRequest.validate(); + assertNotNull(queryEmptyError); + assertThat(queryEmptyError.getMessage(), is("Validation Failed: 1: Field [query] cannot be empty for task type [rerank];")); + } + public void testParseRequest_DefaultsInputTypeToIngest() throws IOException { String singleInputRequest = """ { From b7c18bcfe1090033f88b2afc00ece1e30a01f5d3 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Thu, 27 Jun 2024 10:15:59 +0200 Subject: [PATCH 002/216] ES|QL: Fix DISSECT that overwrites input (#110201) Fixes https://github.com/elastic/elasticsearch/issues/110184 When a DISSECT command overwrites the input, eg. ``` FROM idx | DISSECT foo "%{foo} %{bar}" | KEEP foo, bar ``` The input field (`foo` in this case) could be excluded from the index resolution (incorrectly masked by the `foo` that is the result of the DISSECT). This PR makes sure that the input field does not get lost and is correctly passed to the indexResolver. 
--- docs/changelog/110201.yaml | 6 ++++++ .../testFixtures/src/main/resources/dissect.csv-spec | 11 +++++++++++ .../qa/testFixtures/src/main/resources/grok.csv-spec | 8 ++++++++ .../xpack/esql/action/EsqlCapabilities.java | 8 +++++++- .../elasticsearch/xpack/esql/session/EsqlSession.java | 8 +++----- .../esql/session/IndexResolverFieldNamesTests.java | 8 ++++++++ 6 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/110201.yaml diff --git a/docs/changelog/110201.yaml b/docs/changelog/110201.yaml new file mode 100644 index 0000000000000..a880638881948 --- /dev/null +++ b/docs/changelog/110201.yaml @@ -0,0 +1,6 @@ +pr: 110201 +summary: "ES|QL: Fix DISSECT that overwrites input" +area: ES|QL +type: bug +issues: + - 110184 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec index f8a49c3a59f98..812198c324217 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec @@ -175,6 +175,17 @@ Parto Bamford | Parto | Bamford ; +// different from shadowingSelf because in this case we dissect an indexed field +// see https://github.com/elastic/elasticsearch/issues/110184 +overwriteInputName +required_capability: grok_dissect_masking +from employees | sort emp_no asc | dissect first_name "%{first_name}o%{rest}" | keep emp_no, first_name, rest | limit 1; + +emp_no:integer | first_name:keyword | rest:keyword +10001 | Ge | rgi +; + + overwriteNameWhere from employees | sort emp_no asc | eval full_name = concat(first_name, " ", last_name) | dissect full_name "%{emp_no} %{b}" | where emp_no == "Bezalel" | keep full_name, emp_no, b | limit 3; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec index 49a8085e0c186..9d574eed7be6b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec @@ -229,3 +229,11 @@ emp_no:integer | a:keyword | b:keyword 10004 | [Head, Reporting, Support, Tech] | [Human, Analyst, Engineer, Lead] | Resources | [Head Human Resources, Reporting Analyst, Support Engineer, Tech Lead] 10005 | null | null | null | null ; + +overwriteInputName +required_capability: grok_dissect_masking +row text = "123 abc", int = 5 | sort int asc | grok text "%{NUMBER:text:int} %{WORD:description}" | keep text, int, description; + +text:integer | int:integer | description:keyword +123 | 5 | abc +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 1caf94dde5c30..ecbe25227616b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -89,7 +89,13 @@ public enum Cap { /** * Support for function {@code ST_DISTANCE}. Done in #108764. 
*/ - ST_DISTANCE; + ST_DISTANCE, + + /** + * Fix to GROK and DISSECT that allows extracting attributes with the same name as the input + * https://github.com/elastic/elasticsearch/issues/110184 + */ + GROK_DISSECT_MASKING; Cap() { snapshotOnly = false; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 0589424b37d1e..44c08fc5fd60b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -235,14 +235,12 @@ static Set fieldNames(LogicalPlan parsed, Set enrichPolicyMatchF parsed.forEachDown(p -> {// go over each plan top-down if (p instanceof RegexExtract re) { // for Grok and Dissect - AttributeSet dissectRefs = p.references(); - // don't add to the list of fields the extracted ones (they are not real fields in mappings) - dissectRefs.removeAll(re.extractedFields()); - references.addAll(dissectRefs); - // also remove other down-the-tree references to the extracted fields + // remove other down-the-tree references to the extracted fields for (Attribute extracted : re.extractedFields()) { references.removeIf(attr -> matchByName(attr, extracted.qualifiedName(), false)); } + // but keep the inputs needed by Grok/Dissect + references.addAll(re.input().references()); } else if (p instanceof Enrich) { AttributeSet enrichRefs = p.references(); // Enrich adds an EmptyAttribute if no match field is specified diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 17dca8096de0f..925601bded425 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -1212,6 +1212,14 @@ public void testEnrichOnDefaultFieldWithKeep() { assertThat(fieldNames, equalTo(Set.of("emp_no", "emp_no.*", "language_name", "language_name.*"))); } + public void testDissectOverwriteName() { + Set fieldNames = EsqlSession.fieldNames(parser.createStatement(""" + from employees + | dissect first_name "%{first_name} %{more}" + | keep emp_no, first_name, more"""), Set.of()); + assertThat(fieldNames, equalTo(Set.of("emp_no", "emp_no.*", "first_name", "first_name.*"))); + } + public void testEnrichOnDefaultField() { Set fieldNames = EsqlSession.fieldNames(parser.createStatement(""" from employees From 186edb2c352c1f3000a5bdf5bfe41b23d7e63396 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Thu, 27 Jun 2024 10:22:04 +0200 Subject: [PATCH 003/216] [Profiling] Add field env_https_proxy to profiling-hosts (#110219) --- .../profiling/component-template/profiling-hosts.json | 3 +++ .../profiling/persistence/ProfilingIndexTemplateRegistry.java | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json index d9b92f5cd4f0c..e58a3cbd39f97 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json +++ 
b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json @@ -79,6 +79,9 @@ "protocol": { "type": "keyword" }, + "env_https_proxy": { + "type": "keyword" + }, "config.bpf_log_level": { "type": "long" }, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java index 647f4b64090b3..3b361748abf67 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/persistence/ProfilingIndexTemplateRegistry.java @@ -52,7 +52,8 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 9: Changed sort order for profiling-events-* // version 10: changed mapping profiling-events @timestamp to 'date_nanos' from 'date' // version 11: Added 'profiling.agent.protocol' keyword mapping to profiling-hosts - public static final int INDEX_TEMPLATE_VERSION = 11; + // version 12: Added 'profiling.agent.env_https_proxy' keyword mapping to profiling-hosts + public static final int INDEX_TEMPLATE_VERSION = 12; // history for individual indices / index templates. Only bump these for breaking changes that require to create a new index public static final int PROFILING_EVENTS_VERSION = 4; From 7e81229b7fdd9e5d4924f6cb11660ac7bda1cada Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 27 Jun 2024 11:18:27 +0200 Subject: [PATCH 004/216] Wait for dynamic mapping update more precisely (#110187) We ran into a situation where dynamic mapping updates were retried in a fairly hot loop. The problem that triggered this was waiting for any cluster state update in this logic. This is mostly fine but adds a lot of overhead for retries when there are other actions running at a higher priority than the mapping update. Let's make it specific so that we at least wait for there to be any mapping and for its version to be different from the version that made us request a mapping update in the first place. Also added a breakout in case the index got concurrently deleted so we don't run out the clock in that case. 
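In code terms, the wait now uses a cluster-state predicate instead of firing on any state change; a simplified sketch of the condition from the change below (`initialMappingVersion` is the mapping version captured before requesting the update):
```java
// Wake the waiting bulk operation only when either:
// - the index was concurrently deleted (the breakout case), or
// - a mapping exists and its version differs from the one that
//   triggered the dynamic mapping update request.
observer.waitForNextChange(listener, clusterState -> {
    var indexMetadata = clusterState.metadata().index(primary.shardId().getIndex());
    return indexMetadata == null
        || (indexMetadata.mapping() != null && indexMetadata.getMappingVersion() != initialMappingVersion);
});
```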
--- .../action/bulk/TransportShardBulkAction.java | 17 +++++--- .../bulk/TransportShardBulkActionTests.java | 42 +++++++++---------- .../authz/AuthorizationServiceTests.java | 4 +- 3 files changed, 34 insertions(+), 29 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index cafc25438e98b..7591ef402847e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -70,6 +70,7 @@ import java.util.concurrent.Executor; import java.util.function.Consumer; import java.util.function.LongSupplier; +import java.util.function.ObjLongConsumer; import static org.elasticsearch.core.Strings.format; @@ -150,7 +151,7 @@ protected void dispatchedShardOperationOnPrimary( assert update != null; assert shardId != null; mappingUpdatedAction.updateMappingOnMaster(shardId.getIndex(), update, mappingListener); - }, mappingUpdateListener -> observer.waitForNextChange(new ClusterStateObserver.Listener() { + }, (mappingUpdateListener, initialMappingVersion) -> observer.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { mappingUpdateListener.onResponse(null); @@ -165,6 +166,9 @@ public void onClusterServiceClose() { public void onTimeout(TimeValue timeout) { mappingUpdateListener.onFailure(new MapperException("timed out while waiting for a dynamic mapping update")); } + }, clusterState -> { + var indexMetadata = clusterState.metadata().index(primary.shardId().getIndex()); + return indexMetadata == null || (indexMetadata.mapping() != null && indexMetadata.getMappingVersion() != initialMappingVersion); }), listener, executor(primary), postWriteRefresh, postWriteAction, documentParsingProvider); } @@ -184,7 +188,7 @@ public static void performOnPrimary( UpdateHelper updateHelper, LongSupplier nowInMillisSupplier, MappingUpdatePerformer mappingUpdater, - Consumer> waitForMappingUpdate, + ObjLongConsumer> waitForMappingUpdate, ActionListener> listener, Executor executor ) { @@ -209,7 +213,7 @@ public static void performOnPrimary( UpdateHelper updateHelper, LongSupplier nowInMillisSupplier, MappingUpdatePerformer mappingUpdater, - Consumer> waitForMappingUpdate, + ObjLongConsumer> waitForMappingUpdate, ActionListener> listener, Executor executor, @Nullable PostWriteRefresh postWriteRefresh, @@ -308,7 +312,7 @@ static boolean executeBulkItemRequest( UpdateHelper updateHelper, LongSupplier nowInMillisSupplier, MappingUpdatePerformer mappingUpdater, - Consumer> waitForMappingUpdate, + ObjLongConsumer> waitForMappingUpdate, ActionListener itemDoneListener, DocumentParsingProvider documentParsingProvider ) throws Exception { @@ -398,7 +402,7 @@ static boolean executeBulkItemRequest( private static boolean handleMappingUpdateRequired( BulkPrimaryExecutionContext context, MappingUpdatePerformer mappingUpdater, - Consumer> waitForMappingUpdate, + ObjLongConsumer> waitForMappingUpdate, ActionListener itemDoneListener, IndexShard primary, Engine.Result result, @@ -406,6 +410,7 @@ private static boolean handleMappingUpdateRequired( UpdateHelper.Result updateResult ) { final var mapperService = primary.mapperService(); + final long initialMappingVersion = mapperService.mappingVersion(); try { CompressedXContent mergedSource = mapperService.merge( MapperService.SINGLE_MAPPING_NAME, @@ -439,7 +444,7 @@ public void 
onResponse(Void v) { public void onFailure(Exception e) { context.failOnMappingUpdate(e); } - }, () -> itemDoneListener.onResponse(null))); + }, () -> itemDoneListener.onResponse(null)), initialMappingVersion); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index 18418dda59a3b..3b18c541bd80c 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -120,7 +120,7 @@ public void testExecuteBulkIndexRequest() throws Exception { null, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -152,7 +152,7 @@ public void testExecuteBulkIndexRequest() throws Exception { null, threadPool::absoluteTimeInMillis, new ThrowingMappingUpdatePerformer(new RuntimeException("fail")), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -209,7 +209,7 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception { null, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ActionListener.runAfter(ActionTestUtils.assertNoFailureListener(result -> { // since at least 1 item passed, the tran log location should exist, assertThat(((WritePrimaryResult) result).location, notNullValue()); @@ -285,7 +285,7 @@ public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception { assertNotNull(update); updateCalled.incrementAndGet(); listener.onResponse(null); - }, listener -> listener.onResponse(null), ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE); + }, (listener, mappingVersion) -> listener.onResponse(null), ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE); assertTrue(context.isInitial()); assertTrue(context.hasMoreOperationsToExecute()); assertThat(context.getUpdateRetryCounter(), equalTo(0)); @@ -304,7 +304,7 @@ public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception { null, threadPool::absoluteTimeInMillis, (update, shardId, listener) -> fail("should not have had to update the mappings"), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -345,7 +345,7 @@ public void testExecuteBulkIndexRequestWithErrorWhileUpdatingMapping() throws Ex null, threadPool::absoluteTimeInMillis, errorOnWait == false ? new ThrowingMappingUpdatePerformer(err) : new NoopMappingUpdatePerformer(), - errorOnWait ? listener -> listener.onFailure(err) : listener -> listener.onResponse(null), + errorOnWait ? 
(listener, mappingVersion) -> listener.onFailure(err) : (listener, mappingVersion) -> listener.onResponse(null), new LatchedActionListener<>(new ActionListener() { @Override public void onResponse(Void aVoid) {} @@ -398,7 +398,7 @@ public void testExecuteBulkDeleteRequest() throws Exception { null, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -446,7 +446,7 @@ public void testExecuteBulkDeleteRequest() throws Exception { null, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -510,7 +510,7 @@ public void testNoopUpdateRequest() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -566,7 +566,7 @@ public void testUpdateRequestWithFailure() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -631,7 +631,7 @@ public void testUpdateRequestWithConflictFailure() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> listener.onResponse(null), + (listener, mappingVersion) -> listener.onResponse(null), ASSERTING_DONE_LISTENER, documentParsingProvider ); @@ -697,7 +697,7 @@ public void testUpdateRequestWithSuccess() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, documentParsingProvider ); @@ -756,7 +756,7 @@ public void testUpdateWithDelete() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> listener.onResponse(null), + (listener, mappingVersion) -> listener.onResponse(null), ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -794,7 +794,7 @@ public void testFailureDuringUpdateProcessing() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -834,7 +834,7 @@ public void testTranslogPositionToSync() throws Exception { null, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> {}, + (listener, mappingVersion) -> {}, ASSERTING_DONE_LISTENER, DocumentParsingProvider.EMPTY_INSTANCE ); @@ -937,7 +937,7 @@ public void testRetries() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), - listener -> listener.onResponse(null), + (listener, mappingVersion) -> listener.onResponse(null), new LatchedActionListener<>(ActionTestUtils.assertNoFailureListener(result -> { assertThat(((WritePrimaryResult) result).location, equalTo(resultLocation)); BulkItemResponse primaryResponse = result.replicaRequest().items()[0].getPrimaryResponse(); @@ -1034,7 +1034,7 @@ public void testForceExecutionOnRejectionAfterMappingUpdate() throws Exception { throw new IllegalStateException(e); } }, - listener -> listener.onResponse(null), + (listener, mappingVersion) -> listener.onResponse(null), new 
LatchedActionListener<>(ActionTestUtils.assertNoFailureListener(result -> // Assert that we still need to fsync the location that was successfully written assertThat(((WritePrimaryResult) result).location, equalTo(resultLocation1))), latch), @@ -1096,7 +1096,7 @@ public void testPerformOnPrimaryReportsBulkStats() throws Exception { listener.onResponse(null); } }, - listener -> listener.onFailure(new IllegalStateException("no failure expected")), + (listener, mappingVersion) -> listener.onFailure(new IllegalStateException("no failure expected")), new LatchedActionListener<>(ActionTestUtils.assertNoFailureListener(result -> { try { BulkStats bulkStats = shard.bulkStats(); @@ -1156,7 +1156,7 @@ public void testNoopMappingUpdateInfiniteLoopPrevention() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, (update, shardId, listener) -> fail("the master should not be contacted as the operation yielded a noop mapping update"), - listener -> listener.onResponse(null), + (listener, mappingVersion) -> listener.onResponse(null), ActionTestUtils.assertNoFailureListener(result -> {}), threadPool.executor(Names.WRITE) ) @@ -1200,7 +1200,7 @@ public void testNoopMappingUpdateSuccessOnRetry() throws Exception { when(mapperService.merge(any(), any(CompressedXContent.class), any())).thenReturn(documentMapper); // on the second invocation, the mapping version is incremented // so that the second mapping update attempt doesn't trigger the infinite loop prevention - when(mapperService.mappingVersion()).thenReturn(0L, 1L); + when(mapperService.mappingVersion()).thenReturn(0L, 0L, 1L); UpdateHelper updateHelper = mock(UpdateHelper.class); when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( @@ -1223,7 +1223,7 @@ public void testNoopMappingUpdateSuccessOnRetry() throws Exception { updateHelper, threadPool::absoluteTimeInMillis, (update, shardId, listener) -> fail("the master should not be contacted as the operation yielded a noop mapping update"), - listener -> listener.onFailure(new IllegalStateException("no failure expected")), + (listener, mappingVersion) -> listener.onFailure(new IllegalStateException("no failure expected")), new LatchedActionListener<>(ActionTestUtils.assertNoFailureListener(result -> { BulkItemResponse primaryResponse = result.replicaRequest().items()[0].getPrimaryResponse(); assertFalse(primaryResponse.isFailed()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 9d9528ec6f48b..5f878480a7d0d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -203,7 +203,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; -import java.util.function.Consumer; +import java.util.function.ObjLongConsumer; import java.util.function.Predicate; import java.util.function.Supplier; @@ -1571,7 +1571,7 @@ public void testDenialErrorMessagesForBulkIngest() throws Exception { authorize(authentication, TransportShardBulkAction.ACTION_NAME, request); MappingUpdatePerformer mappingUpdater = (m, s, l) -> l.onResponse(null); - Consumer> waitForMappingUpdate = l -> l.onResponse(null); + ObjLongConsumer> waitForMappingUpdate = (l, mappingVersion) -> 
l.onResponse(null); PlainActionFuture> future = new PlainActionFuture<>(); IndexShard indexShard = mock(IndexShard.class); when(indexShard.getBulkOperationListener()).thenReturn(new BulkOperationListener() { From 5179b0db29b8fa06f904d17017fcf17d4dfedc0e Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 27 Jun 2024 12:17:33 +0200 Subject: [PATCH 005/216] [Connector API] Update status when setting/resetting connector error (#110192) --- .../apis/update-connector-error-api.asciidoc | 5 ++++ .../connector/100_connector_update_error.yml | 2 ++ .../connector/ConnectorIndexService.java | 7 +++++- .../connector/ConnectorIndexServiceTests.java | 23 ++++++++----------- 4 files changed, 22 insertions(+), 15 deletions(-) diff --git a/docs/reference/connector/apis/update-connector-error-api.asciidoc b/docs/reference/connector/apis/update-connector-error-api.asciidoc index 67ea6b6d17cf0..c6ac0c9a1ac22 100644 --- a/docs/reference/connector/apis/update-connector-error-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-error-api.asciidoc @@ -21,6 +21,11 @@ To get started with Connector APIs, check out the {enterprise-search-ref}/connec * To sync data using self-managed connectors, you need to deploy the {enterprise-search-ref}/build-connector.html[Elastic connector service] on your own infrastructure. This service runs automatically on Elastic Cloud for native connectors. * The `connector_id` parameter should reference an existing connector. +[[update-connector-error-api-desc]] +==== {api-description-title} + +Sets the `error` field for the specified connector. If the `error` provided in the request body is non-null, the connector's status is updated to `error`. Otherwise, if the `error` is reset to null, the connector status is updated to `connected`. + [[update-connector-error-api-path-params]] ==== {api-path-parms-title} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/100_connector_update_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/100_connector_update_error.yml index a58f2399301d3..5943f9208c50f 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/100_connector_update_error.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/connector/100_connector_update_error.yml @@ -29,6 +29,7 @@ setup: connector_id: test-connector - match: { error: "some error" } + - match: { status: error } --- @@ -59,6 +60,7 @@ setup: connector_id: test-connector - match: { error: null } + - match: { status: connected } --- "Update Connector Error - 404 when connector doesn't exist": diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index bb03d3c69c74a..cd98b43adc159 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -467,7 +467,8 @@ else if (configurationValues != null) { } /** - * Updates the error property of a {@link Connector}. + * Updates the error property of a {@link Connector}. If error is non-null the resulting {@link ConnectorStatus} + * is 'error', otherwise it's 'connected'. 
* * @param connectorId The ID of the {@link Connector} to be updated. * @param error An instance of error property of {@link Connector}, can be reset to [null]. @@ -475,6 +476,9 @@ else if (configurationValues != null) { */ public void updateConnectorError(String connectorId, String error, ActionListener listener) { try { + + ConnectorStatus connectorStatus = Strings.isNullOrEmpty(error) ? ConnectorStatus.CONNECTED : ConnectorStatus.ERROR; + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connectorId) @@ -482,6 +486,7 @@ public void updateConnectorError(String connectorId, String error, ActionListene .source(new HashMap<>() { { put(Connector.ERROR_FIELD.getPreferredName(), error); + put(Connector.STATUS_FIELD.getPreferredName(), connectorStatus.toString()); } }) ); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index e7de5b073b114..12abca3a78591 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.application.connector.action.ConnectorCreateActionResponse; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorApiKeyIdAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; -import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorIndexNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -712,17 +711,14 @@ public void testUpdateConnectorError() throws Exception { String connectorId = randomUUID(); ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + String error = randomAlphaOfLengthBetween(5, 15); - UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request( - connectorId, - randomAlphaOfLengthBetween(5, 15) - ); - - DocWriteResponse updateResponse = awaitUpdateConnectorError(updateErrorRequest); + DocWriteResponse updateResponse = awaitUpdateConnectorError(connectorId, error); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); Connector indexedConnector = awaitGetConnector(connectorId); - assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); + assertThat(indexedConnector.getError(), equalTo(error)); + assertThat(indexedConnector.getStatus(), equalTo(ConnectorStatus.ERROR)); } public void testUpdateConnectorError_resetWithNull() throws Exception { @@ -731,13 +727,12 @@ public void testUpdateConnectorError_resetWithNull() throws Exception { ConnectorCreateActionResponse resp = awaitCreateConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); - UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request(connectorId, 
null); - - DocWriteResponse updateResponse = awaitUpdateConnectorError(updateErrorRequest); + DocWriteResponse updateResponse = awaitUpdateConnectorError(connectorId, null); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); Connector indexedConnector = awaitGetConnector(connectorId); - assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); + assertNull(indexedConnector.getError()); + assertThat(indexedConnector.getStatus(), equalTo(ConnectorStatus.CONNECTED)); } public void testUpdateConnectorNameOrDescription() throws Exception { @@ -1347,11 +1342,11 @@ public void onFailure(Exception e) { return resp.get(); } - private UpdateResponse awaitUpdateConnectorError(UpdateConnectorErrorAction.Request updatedError) throws Exception { + private UpdateResponse awaitUpdateConnectorError(String connectorId, String error) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.updateConnectorError(updatedError.getConnectorId(), updatedError.getError(), new ActionListener<>() { + connectorIndexService.updateConnectorError(connectorId, error, new ActionListener<>() { @Override public void onResponse(UpdateResponse indexResponse) { resp.set(indexResponse); From abcc38388d4a7c3c63ca0bcc6732e63f9b29090a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 27 Jun 2024 12:18:52 +0200 Subject: [PATCH 006/216] Add request metric to RestController to track success/failure (by status code) (#109957) * Propagate TelemetryProvider in place of Tracer * Add counter for rest requests, with RestHandler name and response status-code attributes --- docs/changelog/109957.yaml | 6 + .../elasticsearch/test/CustomRestPlugin.java | 10 +- .../elasticsearch/rest/RestControllerIT.java | 123 +++++++++++- .../elasticsearch/action/ActionModule.java | 8 +- .../elasticsearch/node/NodeConstruction.java | 2 +- .../interceptor/RestServerActionPlugin.java | 4 +- .../elasticsearch/rest/RestController.java | 129 ++++++++++--- .../org/elasticsearch/rest/RestHandler.java | 4 + .../action/ActionModuleTests.java | 14 +- .../AbstractHttpServerTransportTests.java | 3 +- .../rest/RestControllerTests.java | 178 ++++++++++++++++-- .../rest/RestHttpResponseHeadersTests.java | 4 +- .../indices/RestValidateQueryActionTests.java | 10 +- .../test/rest/RestActionTestCase.java | 4 +- .../action/RestTermsEnumActionTests.java | 10 +- .../xpack/security/SecurityTests.java | 3 +- 16 files changed, 437 insertions(+), 75 deletions(-) create mode 100644 docs/changelog/109957.yaml diff --git a/docs/changelog/109957.yaml b/docs/changelog/109957.yaml new file mode 100644 index 0000000000000..6bbcd8175501c --- /dev/null +++ b/docs/changelog/109957.yaml @@ -0,0 +1,6 @@ +pr: 109957 +summary: Add request metric to `RestController` to track success/failure (by status + code) +area: Infra/Metrics +type: enhancement +issues: [] diff --git a/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java b/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java index 4fbdfa65d40ba..e978e7f2a5c11 100644 --- a/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java +++ b/qa/custom-rest-controller/src/javaRestTest/java/co/elastic/elasticsearch/test/CustomRestPlugin.java @@ -21,7 +21,7 @@ import org.elasticsearch.rest.RestHandler; import 
org.elasticsearch.rest.RestInterceptor; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.usage.UsageService; public class CustomRestPlugin extends Plugin implements RestServerActionPlugin { @@ -59,9 +59,9 @@ public CustomController( NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService, - Tracer tracer + TelemetryProvider telemetryProvider ) { - super(interceptor, client, circuitBreakerService, usageService, tracer); + super(interceptor, client, circuitBreakerService, usageService, telemetryProvider); } @Override @@ -83,9 +83,9 @@ public RestController getRestController( NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService, - Tracer tracer + TelemetryProvider telemetryProvider ) { - return new CustomController(interceptor, client, circuitBreakerService, usageService, tracer); + return new CustomController(interceptor, client, circuitBreakerService, usageService, telemetryProvider); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java index b76bec0652732..7ad464fee92ba 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/rest/RestControllerIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest; import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -18,18 +19,28 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; +import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.function.Consumer; import java.util.function.Predicate; import java.util.function.Supplier; +import java.util.stream.Stream; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numClientNodes = 1, numDataNodes = 0) public class RestControllerIT extends ESIntegTestCase { @Override protected boolean addMockHttpTransport() { @@ -43,9 +54,117 @@ public void testHeadersEmittedWithChunkedResponses() throws IOException { assertEquals(ChunkedResponseWithHeadersPlugin.HEADER_VALUE, response.getHeader(ChunkedResponseWithHeadersPlugin.HEADER_NAME)); } + public void testMetricsEmittedOnSuccess() throws IOException { + final var client = getRestClient(); + final var request = new Request("GET", TestEchoStatusCodePlugin.ROUTE); + request.addParameter("status_code", "200"); + final var response = client.performRequest(request); + + assertEquals(200, 
response.getStatusLine().getStatusCode()); + + assertMeasurement(metric -> { + assertThat(metric.getLong(), is(1L)); + assertThat(metric.attributes(), hasEntry(RestController.HANDLER_NAME_KEY, TestEchoStatusCodePlugin.NAME)); + assertThat(metric.attributes(), hasEntry(RestController.REQUEST_METHOD_KEY, "GET")); + assertThat(metric.attributes(), hasEntry(RestController.STATUS_CODE_KEY, 200)); + }); + } + + public void testMetricsEmittedOnRestError() throws IOException { + final var client = getRestClient(); + final var request = new Request("GET", TestEchoStatusCodePlugin.ROUTE); + request.addParameter("status_code", "503"); + final var response = expectThrows(ResponseException.class, () -> client.performRequest(request)); + + assertEquals(503, response.getResponse().getStatusLine().getStatusCode()); + assertMeasurement(metric -> { + assertThat(metric.getLong(), is(1L)); + assertThat(metric.attributes(), hasEntry(RestController.HANDLER_NAME_KEY, TestEchoStatusCodePlugin.NAME)); + assertThat(metric.attributes(), hasEntry(RestController.REQUEST_METHOD_KEY, "GET")); + assertThat(metric.attributes(), hasEntry(RestController.STATUS_CODE_KEY, 503)); + }); + } + + public void testMetricsEmittedOnWrongMethod() throws IOException { + final var client = getRestClient(); + final var request = new Request("DELETE", TestEchoStatusCodePlugin.ROUTE); + final var response = expectThrows(ResponseException.class, () -> client.performRequest(request)); + + assertEquals(405, response.getResponse().getStatusLine().getStatusCode()); + assertMeasurement(metric -> { + assertThat(metric.getLong(), is(1L)); + assertThat(metric.attributes(), hasEntry(RestController.STATUS_CODE_KEY, RestStatus.METHOD_NOT_ALLOWED.getStatus())); + }); + } + + private static void assertMeasurement(Consumer measurementConsumer) { + var measurements = new ArrayList(); + for (PluginsService pluginsService : internalCluster().getInstances(PluginsService.class)) { + final TestTelemetryPlugin telemetryPlugin = pluginsService.filterPlugins(TestTelemetryPlugin.class).findFirst().orElseThrow(); + telemetryPlugin.collect(); + + final var metrics = telemetryPlugin.getLongCounterMeasurement(RestController.METRIC_REQUESTS_TOTAL); + measurements.addAll(metrics); + } + assertThat(measurements, hasSize(1)); + measurementConsumer.accept(measurements.get(0)); + } + @Override protected Collection> nodePlugins() { - return CollectionUtils.appendToCopy(super.nodePlugins(), ChunkedResponseWithHeadersPlugin.class); + return Stream.concat( + super.nodePlugins().stream(), + Stream.of(ChunkedResponseWithHeadersPlugin.class, TestEchoStatusCodePlugin.class, TestTelemetryPlugin.class) + ).toList(); + } + + public static class TestEchoStatusCodePlugin extends Plugin implements ActionPlugin { + static final String ROUTE = "/_test/echo_status_code"; + static final String NAME = "test_echo_status_code"; + + @Override + public Collection getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster, + Predicate clusterSupportsFeature + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return NAME; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, ROUTE), new Route(RestRequest.Method.POST, ROUTE)); + } + + @Override + protected 
RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + var statusCode = request.param("status_code"); + client.getLocalNodeId(); + var restStatus = RestStatus.fromCode(Integer.parseInt(statusCode)); + return channel -> { + final var response = RestResponse.chunked( + restStatus, + ChunkedRestResponseBodyPart.fromXContent( + params -> Iterators.single((b, p) -> b.startObject().endObject()), + request, + channel + ), + null + ); + channel.sendResponse(response); + }; + } + }); + } } public static class ChunkedResponseWithHeadersPlugin extends Plugin implements ActionPlugin { diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 1c41f2cdff37d..b550755ce7bdd 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -405,7 +405,7 @@ import org.elasticsearch.rest.action.synonyms.RestPutSynonymRuleAction; import org.elasticsearch.rest.action.synonyms.RestPutSynonymsAction; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.usage.UsageService; @@ -470,7 +470,7 @@ public ActionModule( CircuitBreakerService circuitBreakerService, UsageService usageService, SystemIndices systemIndices, - Tracer tracer, + TelemetryProvider telemetryProvider, ClusterService clusterService, RerouteService rerouteService, List> reservedStateHandlers, @@ -513,12 +513,12 @@ public ActionModule( var customController = getRestServerComponent( "REST controller", actionPlugins, - restPlugin -> restPlugin.getRestController(restInterceptor, nodeClient, circuitBreakerService, usageService, tracer) + restPlugin -> restPlugin.getRestController(restInterceptor, nodeClient, circuitBreakerService, usageService, telemetryProvider) ); if (customController != null) { restController = customController; } else { - restController = new RestController(restInterceptor, nodeClient, circuitBreakerService, usageService, tracer); + restController = new RestController(restInterceptor, nodeClient, circuitBreakerService, usageService, telemetryProvider); } reservedClusterStateService = new ReservedClusterStateService(clusterService, rerouteService, reservedStateHandlers); this.restExtension = restExtension; diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index bcf8451e5fe54..aa0f9b8552d22 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -883,7 +883,7 @@ record PluginServiceInstances( circuitBreakerService, createUsageService(), systemIndices, - telemetryProvider.getTracer(), + telemetryProvider, clusterService, rerouteService, buildReservedStateHandlers( diff --git a/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java index 44653dcf8b5fe..29e4efe576116 100644 --- a/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/interceptor/RestServerActionPlugin.java @@ -15,7 +15,7 @@ import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestInterceptor; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.usage.UsageService; import java.util.function.UnaryOperator; @@ -58,7 +58,7 @@ default RestController getRestController( NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService, - Tracer tracer + TelemetryProvider telemetryProvider ) { return null; } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index b08f6ed81017a..3f9c0dbaa11d6 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -39,6 +39,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestHandler.Route; import org.elasticsearch.tasks.Task; +import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.transport.Transports; import org.elasticsearch.usage.SearchUsageHolder; @@ -86,6 +88,9 @@ public class RestController implements HttpServerTransport.Dispatcher { static final String ELASTIC_PRODUCT_HTTP_HEADER_VALUE = "Elasticsearch"; static final Set RESERVED_PATHS = Set.of("/__elb_health__", "/__elb_health__/zk", "/_health", "/_health/zk"); private static final BytesReference FAVICON_RESPONSE; + public static final String STATUS_CODE_KEY = "es_rest_status_code"; + public static final String HANDLER_NAME_KEY = "es_rest_handler_name"; + public static final String REQUEST_METHOD_KEY = "es_rest_request_method"; static { try (InputStream stream = RestController.class.getResourceAsStream("/config/favicon.ico")) { @@ -107,18 +112,23 @@ public class RestController implements HttpServerTransport.Dispatcher { private final UsageService usageService; private final Tracer tracer; + private final LongCounter requestsCounter; // If true, the ServerlessScope annotations will be enforced private final ServerlessApiProtections apiProtections; + public static final String METRIC_REQUESTS_TOTAL = "es.rest.requests.total"; + public RestController( RestInterceptor restInterceptor, NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService, - Tracer tracer + TelemetryProvider telemetryProvider ) { this.usageService = usageService; - this.tracer = tracer; + this.tracer = telemetryProvider.getTracer(); + this.requestsCounter = telemetryProvider.getMeterRegistry() + .registerLongCounter(METRIC_REQUESTS_TOTAL, "The total number of rest requests/responses processed", "unit"); if (restInterceptor == null) { restInterceptor = (request, channel, targetHandler, listener) -> listener.onResponse(Boolean.TRUE); } @@ -355,6 +365,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th sendFailure(channel, (Exception) e.getCause()); } else { channel.sendResponse(new RestResponse(channel, BAD_REQUEST, e)); + recordRequestMetric(BAD_REQUEST, requestsCounter); } } catch (final IOException e) { if (cause != null) { @@ -362,6 +373,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th } logger.warn("failed to send bad request response", e); channel.sendResponse(new RestResponse(INTERNAL_SERVER_ERROR, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); + recordRequestMetric(INTERNAL_SERVER_ERROR, requestsCounter); } } @@ 
-502,8 +514,10 @@ public void onFailure(Exception e) { @SuppressWarnings("unused") protected void validateRequest(RestRequest request, RestHandler handler, NodeClient client) throws ElasticsearchStatusException {} - private static void sendFailure(RestChannel responseChannel, Exception e) throws IOException { - responseChannel.sendResponse(new RestResponse(responseChannel, e)); + private void sendFailure(RestChannel responseChannel, Exception e) throws IOException { + var restResponse = new RestResponse(responseChannel, e); + responseChannel.sendResponse(restResponse); + recordRequestMetric(restResponse.status(), requestsCounter); } /** @@ -602,6 +616,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel } catch (IllegalArgumentException e) { startTrace(threadContext, channel); channel.sendResponse(RestResponse.createSimpleErrorResponse(channel, BAD_REQUEST, e.getMessage())); + recordRequestMetric(BAD_REQUEST, requestsCounter); return; } @@ -629,7 +644,8 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel } } else { startTrace(threadContext, channel, handlers.getPath()); - dispatchRequest(request, channel, handler, handlers, threadContext); + var decoratedChannel = new MeteringRestChannelDecorator(channel, requestsCounter, handler.getConcreteRestHandler()); + dispatchRequest(request, decoratedChannel, handler, handlers, threadContext); return; } } @@ -689,7 +705,7 @@ public SearchUsageHolder getSearchUsageHolder() { * HTTP/1.1 - * 10.4.6 - 405 Method Not Allowed). */ - private static void handleUnsupportedHttpMethod( + private void handleUnsupportedHttpMethod( String uri, @Nullable RestRequest.Method method, final RestChannel channel, @@ -712,9 +728,11 @@ private static void handleUnsupportedHttpMethod( restResponse.addHeader("Allow", Strings.collectionToDelimitedString(validMethodSet, ",")); } channel.sendResponse(restResponse); + recordRequestMetric(METHOD_NOT_ALLOWED, requestsCounter); } catch (final IOException e) { logger.warn("failed to send bad request response", e); channel.sendResponse(new RestResponse(INTERNAL_SERVER_ERROR, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); + recordRequestMetric(INTERNAL_SERVER_ERROR, requestsCounter); } } @@ -725,7 +743,7 @@ private static void handleUnsupportedHttpMethod( * HTTP/1.1 - 9.2 * - Options). */ - private static void handleOptionsRequest(RestChannel channel, Set validMethodSet) { + private void handleOptionsRequest(RestChannel channel, Set validMethodSet) { RestResponse restResponse = new RestResponse(OK, TEXT_CONTENT_TYPE, BytesArray.EMPTY); // When we have an OPTIONS HTTP request and no valid handlers, simply send OK by default (with the Access Control Origin header // which gets automatically added). 
@@ -733,13 +751,14 @@ private static void handleOptionsRequest(RestChannel channel, Set getValidHandlerMethodSet(String rawPath) { return validMethods; } - private static final class ResourceHandlingHttpChannel implements RestChannel { + private static void recordRequestMetric(RestStatus statusCode, String handlerName, String requestMethod, LongCounter requestsCounter) { + try { + Map attributes = Map.of( + STATUS_CODE_KEY, + statusCode.getStatus(), + HANDLER_NAME_KEY, + handlerName, + REQUEST_METHOD_KEY, + requestMethod + ); + requestsCounter.incrementBy(1, attributes); + } catch (Exception ex) { + logger.error("Cannot track request status code", ex); + } + } + + private static void recordRequestMetric(RestStatus statusCode, LongCounter requestsCounter) { + try { + Map attributes = Map.of(STATUS_CODE_KEY, statusCode.getStatus()); + requestsCounter.incrementBy(1, attributes); + } catch (Exception ex) { + logger.error("Cannot track request status code", ex); + } + } + + private static class DelegatingRestChannel implements RestChannel { + private final RestChannel delegate; - private final CircuitBreakerService circuitBreakerService; - private final int contentLength; - private final MethodHandlers methodHandlers; - private final long startTime; - private final AtomicBoolean closed = new AtomicBoolean(); - ResourceHandlingHttpChannel( - RestChannel delegate, - CircuitBreakerService circuitBreakerService, - int contentLength, - MethodHandlers methodHandlers - ) { + private DelegatingRestChannel(RestChannel delegate) { this.delegate = delegate; - this.circuitBreakerService = circuitBreakerService; - this.contentLength = contentLength; - this.methodHandlers = methodHandlers; - this.startTime = rawRelativeTimeInMillis(); } @Override @@ -843,6 +874,50 @@ public boolean detailedErrorsEnabled() { return delegate.detailedErrorsEnabled(); } + @Override + public void sendResponse(RestResponse response) { + delegate.sendResponse(response); + } + } + + private static final class MeteringRestChannelDecorator extends DelegatingRestChannel { + + private final LongCounter requestsCounter; + private final RestHandler restHandler; + + private MeteringRestChannelDecorator(RestChannel delegate, LongCounter requestCounter, RestHandler restHandler) { + super(delegate); + this.requestsCounter = requestCounter; + this.restHandler = restHandler; + } + + @Override + public void sendResponse(RestResponse response) { + super.sendResponse(response); + recordRequestMetric(response.status(), restHandler.getName(), request().method().name(), requestsCounter); + } + } + + private static final class ResourceHandlingHttpChannel extends DelegatingRestChannel { + private final CircuitBreakerService circuitBreakerService; + private final int contentLength; + private final MethodHandlers methodHandlers; + private final long startTime; + private final AtomicBoolean closed = new AtomicBoolean(); + + ResourceHandlingHttpChannel( + RestChannel delegate, + CircuitBreakerService circuitBreakerService, + int contentLength, + MethodHandlers methodHandlers + ) { + super(delegate); + this.circuitBreakerService = circuitBreakerService; + this.contentLength = contentLength; + this.methodHandlers = methodHandlers; + this.startTime = rawRelativeTimeInMillis(); + } + @Override public void sendResponse(RestResponse response) { boolean success = false; @@ -866,7 +941,7 @@ public void sendResponse(RestResponse response) { } } } - delegate.sendResponse(response); + super.sendResponse(response); success = true; } finally { if (success == false) { 
diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index c490f68499783..11208a24ceb10 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -126,6 +126,10 @@ default boolean mediaTypesValid(RestRequest request) { return request.getXContentType() != null; } + default String getName() { + return this.getClass().getSimpleName(); + } + class Route { private final Method method; diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 289ab715e3e78..7afa7adedc7bf 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -123,7 +123,7 @@ public void testSetupRestHandlerContainsKnownBuiltin() { null, usageService, null, - null, + TelemetryProvider.NOOP, mock(ClusterService.class), null, List.of(), @@ -187,7 +187,7 @@ public String getName() { null, usageService, null, - null, + TelemetryProvider.NOOP, mock(ClusterService.class), null, List.of(), @@ -244,7 +244,7 @@ public List getRestHandlers( null, usageService, null, - null, + TelemetryProvider.NOOP, mock(ClusterService.class), null, List.of(), @@ -335,7 +335,7 @@ public void test3rdPartyRestControllerIsNotInstalled() { null, usageService, null, - null, + TelemetryProvider.NOOP, mock(ClusterService.class), null, List.of(), @@ -388,10 +388,10 @@ public RestController getRestController( NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService, - Tracer tracer + TelemetryProvider telemetryProvider ) { if (installController) { - return new RestController(interceptor, client, circuitBreakerService, usageService, tracer); + return new RestController(interceptor, client, circuitBreakerService, usageService, telemetryProvider); } else { return null; } diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 8dcecca0f65c0..26087ce5f1f0b 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLog; @@ -1171,7 +1172,7 @@ public Collection getRestHeaders() { null, new UsageService(), null, - null, + TelemetryProvider.NOOP, mock(ClusterService.class), null, List.of(), diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 10ea83e59c0ad..67f42e6cf1808 100644 --- 
a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -36,6 +36,9 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.rest.RestHandler.Route; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpNodeClient; @@ -66,8 +69,12 @@ import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER; import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER_VALUE; +import static org.elasticsearch.rest.RestController.HANDLER_NAME_KEY; +import static org.elasticsearch.rest.RestController.REQUEST_METHOD_KEY; +import static org.elasticsearch.rest.RestController.STATUS_CODE_KEY; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.OPTIONS; +import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -92,6 +99,8 @@ public class RestControllerTests extends ESTestCase { private TestThreadPool threadPool; private NodeClient client; private Tracer tracer; + private LongCounter requestsCounter; + private TelemetryProvider telemetryProvider; private List methodList; @Before @@ -114,7 +123,16 @@ public void setup() { threadPool = createThreadPool(); client = new NoOpNodeClient(threadPool); tracer = mock(Tracer.class); - restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + requestsCounter = mock(LongCounter.class); + telemetryProvider = mock(TelemetryProvider.class); + var mockMeterRegister = mock(MeterRegistry.class); + when(telemetryProvider.getTracer()).thenReturn(tracer); + when(telemetryProvider.getMeterRegistry()).thenReturn(mockMeterRegister); + when(mockMeterRegister.registerLongCounter(eq(RestController.METRIC_REQUESTS_TOTAL), anyString(), anyString())).thenReturn( + requestsCounter + ); + + restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); restController.registerHandler( new Route(GET, "/"), (request, channel, client) -> channel.sendResponse( @@ -136,7 +154,7 @@ public void teardown() throws IOException { public void testApplyProductSpecificResponseHeaders() { final ThreadContext threadContext = client.threadPool().getThreadContext(); - final RestController restController = new RestController(null, null, circuitBreakerService, usageService, tracer); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); @@ -152,7 +170,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", true), new RestHeaderDefinition("header.2", false)) ); - final RestController restController = new RestController(null, null, 
circuitBreakerService, usageService, tracer); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "bar")); @@ -162,12 +180,122 @@ public void testRequestWithDisallowedMultiValuedHeader() { assertTrue(channel.getSendResponseCalled()); } + public void testDispatchWithNamedHandlerEmitsMetricWithName() { + final ThreadContext threadContext = client.threadPool().getThreadContext(); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); + RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); + final RestController spyRestController = spy(restController); + when(spyRestController.getAllHandlers(any(), eq(fakeRequest.rawPath()))).thenReturn(new Iterator<>() { + @Override + public boolean hasNext() { + return true; + } + + @Override + public MethodHandlers next() { + return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), new RestHandler() { + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { + channel.sendResponse(new RestResponse(RestStatus.OK, "Test")); + } + + @Override + public String getName() { + return "test_handler_name"; + } + }); + } + }); + AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.OK); + spyRestController.dispatchRequest(fakeRequest, channel, threadContext); + verify(requestsCounter).incrementBy( + eq(1L), + eq(Map.of(STATUS_CODE_KEY, 200, HANDLER_NAME_KEY, "test_handler_name", REQUEST_METHOD_KEY, fakeRequest.method().name())) + ); + } + + public void testDispatchWithoutANamedHandlerEmitsMetricWithNoName() { + final ThreadContext threadContext = client.threadPool().getThreadContext(); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); + RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); + final RestController spyRestController = spy(restController); + when(spyRestController.getAllHandlers(any(), eq(fakeRequest.rawPath()))).thenReturn(new Iterator<>() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public MethodHandlers next() { + return null; + } + }); + AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); + spyRestController.dispatchRequest(fakeRequest, channel, threadContext); + verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); + } + + public void testDispatchThrowsEmitsMetric() { + final ThreadContext threadContext = client.threadPool().getThreadContext(); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); + RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); + final RestController spyRestController = spy(restController); + when(spyRestController.getAllHandlers(any(), eq(fakeRequest.rawPath()))).thenReturn(new Iterator<>() { + @Override + public boolean hasNext() { + return true; + } + + @Override + public MethodHandlers next() { + throw new IllegalArgumentException(); + } + }); + + AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); + spyRestController.dispatchRequest(fakeRequest, channel, threadContext); + 
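// the handler iterator threw before any handler was resolved, so only the status code attribute is recorded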
verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); + } + + public void testDispatchNoHandlerEmitsMetric() { + final ThreadContext threadContext = client.threadPool().getThreadContext(); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); + RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); + final RestController spyRestController = spy(restController); + var handlers = List.of(new MethodHandlers("/").addMethod(POST, RestApiVersion.current(), new RestHandler() { + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { + channel.sendResponse(new RestResponse(RestStatus.OK, "Test")); + } + + @Override + public String getName() { + return "test_handler_name"; + } + })); + when(spyRestController.getAllHandlers(any(), eq(fakeRequest.rawPath()))).thenAnswer(x -> handlers.iterator()); + + AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.METHOD_NOT_ALLOWED); + spyRestController.dispatchRequest(fakeRequest, channel, threadContext); + verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 405))); + } + + public void testDispatchBadRequestEmitsMetric() { + final ThreadContext threadContext = client.threadPool().getThreadContext(); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); + RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); + + AssertingChannel channel = new AssertingChannel(fakeRequest, false, RestStatus.BAD_REQUEST); + restController.dispatchBadRequest(channel, threadContext, new Exception()); + verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); + } + /** * Check that dispatching a request causes a trace span to be started. 
*/ public void testDispatchStartsTrace() { final ThreadContext threadContext = client.threadPool().getThreadContext(); - final RestController restController = new RestController(null, null, circuitBreakerService, usageService, tracer); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); final RestController spyRestController = spy(restController); when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<>() { @@ -196,7 +324,7 @@ public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { Set headers = new HashSet<>( Arrays.asList(new RestHeaderDefinition("header.1", true), new RestHeaderDefinition("header.2", false)) ); - final RestController restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + final RestController restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "foo")); @@ -267,7 +395,7 @@ public void testRegisterAsReplacedHandler() { } public void testRegisterSecondMethodWithDifferentNamedWildcard() { - final RestController restController = new RestController(null, null, circuitBreakerService, usageService, tracer); + final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest.Method firstMethod = randomFrom(methodList); RestRequest.Method secondMethod = randomFrom(methodList.stream().filter(m -> m != firstMethod).toList()); @@ -297,7 +425,13 @@ public void testRestInterceptor() throws Exception { wrapperCalled.set(true); listener.onResponse(callHandler); }; - final RestController restController = new RestController(interceptor, client, circuitBreakerService, usageService, tracer); + final RestController restController = new RestController( + interceptor, + client, + circuitBreakerService, + usageService, + telemetryProvider + ); restController.registerHandler(new Route(GET, "/wrapped"), handler); RestRequest request = testRestRequest("/wrapped", "{}", XContentType.JSON); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); @@ -384,7 +518,7 @@ public void testDispatchRequiresContentTypeForRequestsWithContent() { String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, null); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); - restController = new RestController(null, null, circuitBreakerService, usageService, tracer); + restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); restController.registerHandler( new Route(GET, "/"), (r, c, client) -> c.sendResponse(new RestResponse(RestStatus.OK, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)) @@ -779,7 +913,7 @@ public Method method() { public void testDispatchCompatibleHandler() { - RestController restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + RestController restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); final RestApiVersion version = RestApiVersion.minimumSupported(); @@ 
-803,7 +937,7 @@ public void testDispatchCompatibleHandler() { public void testDispatchCompatibleRequestToNewlyAddedHandler() { - RestController restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + RestController restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); final RestApiVersion version = RestApiVersion.minimumSupported(); @@ -846,7 +980,7 @@ private FakeRestRequest requestWithContent(String mediaType) { } public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { - RestController restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + RestController restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); final RestApiVersion version = RestApiVersion.current(); @@ -871,7 +1005,7 @@ public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { } public void testCustomMediaTypeValidation() { - RestController restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + RestController restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); final String mediaType = "application/x-protobuf"; FakeRestRequest fakeRestRequest = requestWithContent(mediaType); @@ -897,7 +1031,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c } public void testBrowserSafelistedContentTypesAreRejected() { - RestController restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + RestController restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); final String mediaType = randomFrom(RestController.SAFELISTED_MEDIA_TYPES); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); @@ -918,7 +1052,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c } public void testRegisterWithReservedPath() { - final RestController restController = new RestController(null, client, circuitBreakerService, usageService, tracer); + final RestController restController = new RestController(null, client, circuitBreakerService, usageService, telemetryProvider); for (String path : RestController.RESERVED_PATHS) { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> { restController.registerHandler( @@ -936,7 +1070,13 @@ public void testRegisterWithReservedPath() { * Test that when serverless is disabled, all endpoints are available regardless of ServerlessScope annotations. */ public void testApiProtectionWithServerlessDisabled() { - final RestController restController = new RestController(null, client, circuitBreakerService, new UsageService(), tracer); + final RestController restController = new RestController( + null, + client, + circuitBreakerService, + new UsageService(), + telemetryProvider + ); restController.registerHandler(new PublicRestHandler()); restController.registerHandler(new InternalRestHandler()); restController.registerHandler(new HiddenRestHandler()); @@ -952,7 +1092,13 @@ public void testApiProtectionWithServerlessDisabled() { * Test that when serverless is enabled, a normal user can not access endpoints without a ServerlessScope annotation. 
*/ public void testApiProtectionWithServerlessEnabledAsEndUser() { - final RestController restController = new RestController(null, client, circuitBreakerService, new UsageService(), tracer); + final RestController restController = new RestController( + null, + client, + circuitBreakerService, + new UsageService(), + telemetryProvider + ); restController.registerHandler(new PublicRestHandler()); restController.registerHandler(new InternalRestHandler()); restController.registerHandler(new HiddenRestHandler()); diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java index 9c38cd2615355..acb1485740238 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.rest.RestHandler.Route; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestChannel; import org.elasticsearch.test.rest.FakeRestRequest; @@ -80,7 +80,7 @@ public void testUnsupportedMethodResponseHttpHeader() throws Exception { ); UsageService usageService = new UsageService(); - RestController restController = new RestController(null, null, circuitBreakerService, usageService, Tracer.NOOP); + RestController restController = new RestController(null, null, circuitBreakerService, usageService, TelemetryProvider.NOOP); // A basic RestHandler handles requests to the endpoint RestHandler restHandler = (request, channel, client) -> channel.sendResponse(new RestResponse(RestStatus.OK, "")); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 761d2b454b134..59ab7ec719cf4 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.search.AbstractSearchTestCase; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.rest.FakeRestChannel; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.TestThreadPool; @@ -53,7 +53,13 @@ public class RestValidateQueryActionTests extends AbstractSearchTestCase { private NodeClient client = new NodeClient(Settings.EMPTY, threadPool); private UsageService usageService = new UsageService(); - private RestController controller = new RestController(null, client, new NoneCircuitBreakerService(), usageService, Tracer.NOOP); + private RestController controller = new RestController( + null, + client, + new NoneCircuitBreakerService(), + usageService, + TelemetryProvider.NOOP + ); private RestValidateQueryAction action = new RestValidateQueryAction(); /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java index fad8575ae1d58..9fed08234f7a4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java @@ -18,7 +18,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpNodeClient; import org.elasticsearch.threadpool.TestThreadPool; @@ -45,7 +45,7 @@ public abstract class RestActionTestCase extends ESTestCase { public void setUpController() { threadPool = createThreadPool(); verifyingClient = new VerifyingClient(threadPool); - controller = new RestController(null, verifyingClient, new NoneCircuitBreakerService(), new UsageService(), Tracer.NOOP); + controller = new RestController(null, verifyingClient, new NoneCircuitBreakerService(), new UsageService(), TelemetryProvider.NOOP); } @After diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java index b0ad137f0f1b6..2ea372a84b66c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/action/RestTermsEnumActionTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestChannel; import org.elasticsearch.test.rest.FakeRestRequest; @@ -50,7 +50,13 @@ public class RestTermsEnumActionTests extends ESTestCase { private static NodeClient client = new NodeClient(Settings.EMPTY, threadPool); private static UsageService usageService = new UsageService(); - private static RestController controller = new RestController(null, client, new NoneCircuitBreakerService(), usageService, Tracer.NOOP); + private static RestController controller = new RestController( + null, + client, + new NoneCircuitBreakerService(), + usageService, + TelemetryProvider.NOOP + ); private static RestTermsEnumAction action = new RestTermsEnumAction(); /** diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 69e8d7b8b681e..1aa40a48ecc97 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -61,7 +61,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.ScriptService; import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.MockLog; @@ -821,7 +820,7 @@ public void testSecurityRestHandlerInterceptorCanBeInstalled() throws IllegalAcc null, usageService, 
null, - Tracer.NOOP, + TelemetryProvider.NOOP, mock(ClusterService.class), null, List.of(), From fc0313f4297a3f90030dbdec73230396bfebecc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Thu, 27 Jun 2024 13:21:55 +0200 Subject: [PATCH 007/216] ESQL: Add aggregations testing base and docs (#110042) - Added a new `AbstractAggregationTestCase` base class for tests, that shares most of the code of function tests, adapted for aggregations. Including both testing and docs generation. - Reused the `AbstractFunctionTestCase` class to also let us test evaluators if the aggregation is foldable - Added a `TopListTests` example - This includes the docs for Top_list _(Also added a missing include of Ip_prefix docs)_ - Adapted Kibana docs to use `type: "agg"` (@drewdaemon) The current tests are very basic: Consume a page, generate an output, all in Single aggregation mode (No intermediates, no grouping). More complex testing will be added in future PRs Initial PR of https://github.com/elastic/elasticsearch/issues/109917 --- .../functions/aggregation-functions.asciidoc | 2 + .../functions/description/top_list.asciidoc | 5 + .../esql/functions/examples/top_list.asciidoc | 13 + .../esql/functions/ip-functions.asciidoc | 2 + .../functions/kibana/definition/top_list.json | 107 ++ .../esql/functions/kibana/docs/top_list.md | 11 + .../esql/functions/layout/top_list.asciidoc | 15 + .../functions/parameters/top_list.asciidoc | 12 + .../esql/functions/signature/top_list.svg | 1 + .../esql/functions/types/top_list.asciidoc | 12 + .../esql/core/expression/TypeResolutions.java | 22 + .../function/aggregate/TopList.java | 6 +- .../function/scalar/package-info.java | 2 +- .../function/AbstractAggregationTestCase.java | 213 ++++ .../function/AbstractFunctionTestCase.java | 960 ++---------------- .../AbstractScalarFunctionTestCase.java | 884 ++++++++++++++++ .../expression/function/FunctionName.java | 2 +- .../expression/function/TestCaseSupplier.java | 73 +- .../function/aggregate/TopListTests.java | 249 +++++ .../function/grouping/BucketTests.java | 4 +- ...AbstractConfigurationFunctionTestCase.java | 4 +- .../scalar/conditional/CaseTests.java | 4 +- .../scalar/conditional/GreatestTests.java | 4 +- .../scalar/conditional/LeastTests.java | 4 +- .../scalar/convert/FromBase64Tests.java | 4 +- .../scalar/convert/ToBase64Tests.java | 4 +- .../scalar/convert/ToBooleanTests.java | 4 +- .../scalar/convert/ToCartesianPointTests.java | 4 +- .../scalar/convert/ToCartesianShapeTests.java | 4 +- .../scalar/convert/ToDatetimeTests.java | 4 +- .../scalar/convert/ToDegreesTests.java | 4 +- .../scalar/convert/ToDoubleTests.java | 4 +- .../scalar/convert/ToGeoPointTests.java | 4 +- .../scalar/convert/ToGeoShapeTests.java | 4 +- .../function/scalar/convert/ToIPTests.java | 4 +- .../scalar/convert/ToIntegerTests.java | 4 +- .../function/scalar/convert/ToLongTests.java | 4 +- .../scalar/convert/ToRadiansTests.java | 4 +- .../scalar/convert/ToStringTests.java | 4 +- .../scalar/convert/ToUnsignedLongTests.java | 4 +- .../scalar/convert/ToVersionTests.java | 4 +- .../function/scalar/date/DateDiffTests.java | 4 +- .../function/scalar/date/DateParseTests.java | 4 +- .../function/scalar/date/DateTruncTests.java | 4 +- .../function/scalar/ip/CIDRMatchTests.java | 4 +- .../function/scalar/ip/IpPrefixTests.java | 4 +- .../function/scalar/math/AbsTests.java | 4 +- .../function/scalar/math/AcosTests.java | 4 +- .../function/scalar/math/AsinTests.java | 4 +- .../function/scalar/math/Atan2Tests.java | 4 +- 
.../function/scalar/math/AtanTests.java | 4 +- .../function/scalar/math/CbrtTests.java | 4 +- .../function/scalar/math/CeilTests.java | 4 +- .../function/scalar/math/CosTests.java | 4 +- .../function/scalar/math/CoshTests.java | 4 +- .../function/scalar/math/ETests.java | 4 +- .../function/scalar/math/FloorTests.java | 4 +- .../function/scalar/math/Log10Tests.java | 4 +- .../function/scalar/math/LogTests.java | 4 +- .../function/scalar/math/PiTests.java | 4 +- .../function/scalar/math/PowTests.java | 4 +- .../function/scalar/math/RoundTests.java | 4 +- .../function/scalar/math/SignumTests.java | 4 +- .../function/scalar/math/SinTests.java | 4 +- .../function/scalar/math/SinhTests.java | 4 +- .../function/scalar/math/SqrtTests.java | 4 +- .../function/scalar/math/TanTests.java | 4 +- .../function/scalar/math/TanhTests.java | 4 +- .../function/scalar/math/TauTests.java | 4 +- .../AbstractMultivalueFunctionTestCase.java | 4 +- .../scalar/multivalue/MvAppendTests.java | 4 +- .../scalar/multivalue/MvConcatTests.java | 4 +- .../scalar/multivalue/MvSliceTests.java | 4 +- .../scalar/multivalue/MvSortTests.java | 4 +- .../scalar/multivalue/MvZipTests.java | 4 +- .../function/scalar/nulls/CoalesceTests.java | 4 +- .../function/scalar/nulls/IsNotNullTests.java | 4 +- .../function/scalar/nulls/IsNullTests.java | 4 +- .../BinarySpatialFunctionTestCase.java | 4 +- .../function/scalar/spatial/StXTests.java | 4 +- .../function/scalar/spatial/StYTests.java | 4 +- .../scalar/string/AbstractTrimTests.java | 4 +- .../function/scalar/string/ConcatTests.java | 4 +- .../function/scalar/string/EndsWithTests.java | 4 +- .../function/scalar/string/LeftTests.java | 4 +- .../function/scalar/string/LengthTests.java | 4 +- .../function/scalar/string/LocateTests.java | 4 +- .../function/scalar/string/RLikeTests.java | 4 +- .../scalar/string/RepeatStaticTests.java | 8 +- .../function/scalar/string/RepeatTests.java | 4 +- .../function/scalar/string/ReplaceTests.java | 4 +- .../function/scalar/string/RightTests.java | 4 +- .../function/scalar/string/SplitTests.java | 4 +- .../scalar/string/StartsWithTests.java | 4 +- .../scalar/string/SubstringTests.java | 4 +- .../scalar/string/WildcardLikeTests.java | 4 +- .../AbstractBinaryOperatorTestCase.java | 4 +- .../predicate/operator/BreakerTests.java | 3 +- .../operator/arithmetic/AddTests.java | 4 +- .../operator/arithmetic/DivTests.java | 4 +- .../operator/arithmetic/ModTests.java | 4 +- .../operator/arithmetic/MulTests.java | 4 +- .../operator/arithmetic/NegTests.java | 4 +- .../operator/arithmetic/SubTests.java | 4 +- .../operator/comparison/EqualsTests.java | 9 +- .../comparison/GreaterThanOrEqualTests.java | 9 +- .../operator/comparison/GreaterThanTests.java | 9 +- .../comparison/LessThanOrEqualTests.java | 9 +- .../operator/comparison/LessThanTests.java | 9 +- .../operator/comparison/NotEqualsTests.java | 9 +- 110 files changed, 1895 insertions(+), 1093 deletions(-) create mode 100644 docs/reference/esql/functions/description/top_list.asciidoc create mode 100644 docs/reference/esql/functions/examples/top_list.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/top_list.json create mode 100644 docs/reference/esql/functions/kibana/docs/top_list.md create mode 100644 docs/reference/esql/functions/layout/top_list.asciidoc create mode 100644 docs/reference/esql/functions/parameters/top_list.asciidoc create mode 100644 docs/reference/esql/functions/signature/top_list.svg create mode 100644 docs/reference/esql/functions/types/top_list.asciidoc create mode 
100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListTests.java diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 074fcce9ad43d..cf3512449e26f 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -18,6 +18,7 @@ The <> command supports these aggregate functions: * <> * experimental:[] <> * <> +* <> * <> // end::agg_list[] @@ -31,4 +32,5 @@ include::min.asciidoc[] include::percentile.asciidoc[] include::st_centroid_agg.asciidoc[] include::sum.asciidoc[] +include::layout/top_list.asciidoc[] include::values.asciidoc[] diff --git a/docs/reference/esql/functions/description/top_list.asciidoc b/docs/reference/esql/functions/description/top_list.asciidoc new file mode 100644 index 0000000000000..39b31e17aec55 --- /dev/null +++ b/docs/reference/esql/functions/description/top_list.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Collects the top values for a field. Includes repeated values. diff --git a/docs/reference/esql/functions/examples/top_list.asciidoc b/docs/reference/esql/functions/examples/top_list.asciidoc new file mode 100644 index 0000000000000..09d32bc9f601a --- /dev/null +++ b/docs/reference/esql/functions/examples/top_list.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats_top_list.csv-spec[tag=top-list] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats_top_list.csv-spec[tag=top-list-result] +|=== + diff --git a/docs/reference/esql/functions/ip-functions.asciidoc b/docs/reference/esql/functions/ip-functions.asciidoc index 55c808e587a18..0d58e24c02945 100644 --- a/docs/reference/esql/functions/ip-functions.asciidoc +++ b/docs/reference/esql/functions/ip-functions.asciidoc @@ -9,6 +9,8 @@ // tag::ip_list[] * <> +* <> // end::ip_list[] include::layout/cidr_match.asciidoc[] +include::layout/ip_prefix.asciidoc[] diff --git a/docs/reference/esql/functions/kibana/definition/top_list.json b/docs/reference/esql/functions/kibana/definition/top_list.json new file mode 100644 index 0000000000000..99518a40680ee --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/top_list.json @@ -0,0 +1,107 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "agg", + "name" : "top_list", + "description" : "Collects the top values for a field. Includes repeated values.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "datetime", + "optional" : false, + "description" : "The field to collect the top values for." + }, + { + "name" : "limit", + "type" : "integer", + "optional" : false, + "description" : "The maximum number of values to collect." 
+ }, + { + "name" : "order", + "type" : "keyword", + "optional" : false, + "description" : "The order to calculate the top values. Either `asc` or `desc`." + } + ], + "variadic" : false, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "The field to collect the top values for." + }, + { + "name" : "limit", + "type" : "integer", + "optional" : false, + "description" : "The maximum number of values to collect." + }, + { + "name" : "order", + "type" : "keyword", + "optional" : false, + "description" : "The order to calculate the top values. Either `asc` or `desc`." + } + ], + "variadic" : false, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "field", + "type" : "integer", + "optional" : false, + "description" : "The field to collect the top values for." + }, + { + "name" : "limit", + "type" : "integer", + "optional" : false, + "description" : "The maximum number of values to collect." + }, + { + "name" : "order", + "type" : "keyword", + "optional" : false, + "description" : "The order to calculate the top values. Either `asc` or `desc`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "The field to collect the top values for." + }, + { + "name" : "limit", + "type" : "integer", + "optional" : false, + "description" : "The maximum number of values to collect." + }, + { + "name" : "order", + "type" : "keyword", + "optional" : false, + "description" : "The order to calculate the top values. Either `asc` or `desc`." + } + ], + "variadic" : false, + "returnType" : "long" + } + ], + "examples" : [ + "FROM employees\n| STATS top_salaries = TOP_LIST(salary, 3, \"desc\"), top_salary = MAX(salary)" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/top_list.md b/docs/reference/esql/functions/kibana/docs/top_list.md new file mode 100644 index 0000000000000..f7acdf3162b38 --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/top_list.md @@ -0,0 +1,11 @@ + + +### TOP_LIST +Collects the top values for a field. Includes repeated values. + +``` +FROM employees +| STATS top_salaries = TOP_LIST(salary, 3, "desc"), top_salary = MAX(salary) +``` diff --git a/docs/reference/esql/functions/layout/top_list.asciidoc b/docs/reference/esql/functions/layout/top_list.asciidoc new file mode 100644 index 0000000000000..4735395ca0c0d --- /dev/null +++ b/docs/reference/esql/functions/layout/top_list.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-top_list]] +=== `TOP_LIST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/top_list.svg[Embedded,opts=inline] + +include::../parameters/top_list.asciidoc[] +include::../description/top_list.asciidoc[] +include::../types/top_list.asciidoc[] +include::../examples/top_list.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/top_list.asciidoc b/docs/reference/esql/functions/parameters/top_list.asciidoc new file mode 100644 index 0000000000000..979bca393b5aa --- /dev/null +++ b/docs/reference/esql/functions/parameters/top_list.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* + +`field`:: +The field to collect the top values for. + +`limit`:: +The maximum number of values to collect. 
+ +`order`:: +The order to calculate the top values. Either `asc` or `desc`. diff --git a/docs/reference/esql/functions/signature/top_list.svg b/docs/reference/esql/functions/signature/top_list.svg new file mode 100644 index 0000000000000..e7a5c7a292d41 --- /dev/null +++ b/docs/reference/esql/functions/signature/top_list.svg @@ -0,0 +1 @@ +TOP_LIST(field,limit,order) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/top_list.asciidoc b/docs/reference/esql/functions/types/top_list.asciidoc new file mode 100644 index 0000000000000..1874cd8b12bf3 --- /dev/null +++ b/docs/reference/esql/functions/types/top_list.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | limit | order | result +datetime | integer | keyword | datetime +double | integer | keyword | double +integer | integer | keyword | integer +long | integer | keyword | long +|=== diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java index 588b0a2af55d3..7302d08f81925 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java @@ -132,6 +132,28 @@ public static TypeResolution isFoldable(Expression e, String operationName, Para return TypeResolution.TYPE_RESOLVED; } + public static TypeResolution isNotNullAndFoldable(Expression e, String operationName, ParamOrdinal paramOrd) { + TypeResolution resolution = isFoldable(e, operationName, paramOrd); + + if (resolution.unresolved()) { + return resolution; + } + + if (e.dataType() == DataType.NULL || e.fold() == null) { + resolution = new TypeResolution( + format( + null, + "{}argument of [{}] cannot be null, received [{}]", + paramOrd == null || paramOrd == DEFAULT ? 
"" : paramOrd.name().toLowerCase(Locale.ROOT) + " ", + operationName, + Expressions.name(e) + ) + ); + } + + return resolution; + } + public static TypeResolution isNotFoldable(Expression e, String operationName, ParamOrdinal paramOrd) { if (e.foldable()) { return new TypeResolution( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopList.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopList.java index 93e3da7c19cf8..16cfdad89612b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopList.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopList.java @@ -36,7 +36,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.THIRD; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isFoldable; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNotNullAndFoldable; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; @@ -125,9 +125,9 @@ protected TypeResolution resolveType() { sourceText(), FIRST, "numeric except unsigned_long or counter types" - ).and(isFoldable(limitField(), sourceText(), SECOND)) + ).and(isNotNullAndFoldable(limitField(), sourceText(), SECOND)) .and(isType(limitField(), dt -> dt == DataType.INTEGER, sourceText(), SECOND, "integer")) - .and(isFoldable(orderField(), sourceText(), THIRD)) + .and(isNotNullAndFoldable(orderField(), sourceText(), THIRD)) .and(isString(orderField(), sourceText(), THIRD)); if (typeResolution.unresolved()) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index 2e40ee1634d1b..cd88619c4fdbe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -133,7 +133,7 @@ * *
 * <li>
 *     Now it's time to make a unit test! The infrastructure for these is under some flux at
- *     the moment, but it's good to extend from {@code AbstractFunctionTestCase}. All of
+ *     the moment, but it's good to extend from {@code AbstractScalarFunctionTestCase}. All of
 *     these tests are parameterized and expect to spend some time finding good parameters.
 * </li>
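To see the intended usage of the base class added below, here is a rough sketch of a concrete subclass. The class name and supplier wiring are hypothetical (the real `TopListTests` added by this patch is more thorough), and the `TopList` constructor arity is assumed from its field/limit/order parameters:

```java
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

// Hypothetical sketch, not the TopListTests shipped in this patch.
public class TopListSketchTests extends AbstractAggregationTestCase {
    public TopListSketchTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
        this.testCase = testCaseSupplier.get();
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() {
        List<TestCaseSupplier> suppliers = new ArrayList<>();
        // ... append TestCaseSupplier instances built from multi-row typed data here ...
        return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers);
    }

    @Override
    protected Expression build(Source source, List<Expression> args) {
        return new TopList(source, args.get(0), args.get(1), args.get(2));
    }
}
```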
  • diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java new file mode 100644 index 0000000000000..05a6cec51284f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import org.elasticsearch.compute.aggregation.Aggregator; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.NumericUtils; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.optimizer.FoldNull; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.util.List; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.not; + +/** + * Base class for aggregation tests. + */ +public abstract class AbstractAggregationTestCase extends AbstractFunctionTestCase { + /** + * Converts a list of aggregation test cases into a list of parameter suppliers. + * Also, adds a default set of extra test cases. + *
+ * <p>
+ *     Use if possible, as this method may get updated with new checks in the future.
+ * </p>
    + */ + protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks(List suppliers) { + // TODO: Add case with no input expecting null + return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); + } + + public void testAggregate() { + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + + resolveExpression(expression, this::aggregateSingleMode, this::evaluate); + } + + public void testFold() { + Expression expression = buildLiteralExpression(testCase); + + resolveExpression(expression, aggregatorFunctionSupplier -> { + // An aggregation cannot be folded + }, evaluableExpression -> { + assertTrue(evaluableExpression.foldable()); + if (testCase.foldingExceptionClass() == null) { + Object result = evaluableExpression.fold(); + // Decode unsigned longs into BigIntegers + if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { + result = NumericUtils.unsignedLongAsBigInteger((Long) result); + } + assertThat(result, testCase.getMatcher()); + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } else { + Throwable t = expectThrows(testCase.foldingExceptionClass(), evaluableExpression::fold); + assertThat(t.getMessage(), equalTo(testCase.foldingExceptionMessage())); + } + }); + } + + private void aggregateSingleMode(AggregatorFunctionSupplier aggregatorFunctionSupplier) { + Object result; + try (var aggregator = new Aggregator(aggregatorFunctionSupplier.aggregator(driverContext()), AggregatorMode.SINGLE)) { + Page inputPage = rows(testCase.getMultiRowDataValues()); + try { + aggregator.processPage(inputPage); + } finally { + inputPage.releaseBlocks(); + } + + // ElementType from DataType + result = extractResultFromAggregator(aggregator, PlannerUtils.toElementType(testCase.expectedType())); + } + + assertThat(result, not(equalTo(Double.NaN))); + assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); + assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); + assertThat(result, testCase.getMatcher()); + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + + private void evaluate(Expression evaluableExpression) { + Object result; + try (var evaluator = evaluator(evaluableExpression).get(driverContext())) { + try (Block block = evaluator.eval(row(testCase.getDataValues()))) { + result = toJavaObjectUnsignedLongAware(block, 0); + } + } + + assertThat(result, not(equalTo(Double.NaN))); + assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); + assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); + assertThat(result, testCase.getMatcher()); + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + + private void resolveExpression( + Expression expression, + Consumer onAggregator, + Consumer onEvaluableExpression + ) { + logger.info( + "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) + ); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + expression = resolveSurrogates(expression); + + 
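// once surrogates are unwrapped, the expression must still type-resolve; FoldNull then normalizes it
// so the expected type and the aggregator-vs-evaluator path can be asserted below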
Expression.TypeResolution resolution = expression.typeResolved(); + if (resolution.unresolved()) { + throw new AssertionError("expected resolved " + resolution.message()); + } + + expression = new FoldNull().rule(expression); + assertThat(expression.dataType(), equalTo(testCase.expectedType())); + + if (expression instanceof AggregateFunction == false) { + onEvaluableExpression.accept(expression); + return; + } + + assertThat(expression, instanceOf(ToAggregator.class)); + logger.info("Result type: " + expression.dataType()); + + var inputChannels = inputChannels(); + onAggregator.accept(((ToAggregator) expression).supplier(inputChannels)); + } + + private Object extractResultFromAggregator(Aggregator aggregator, ElementType expectedElementType) { + var blocksArraySize = randomIntBetween(1, 10); + var resultBlockIndex = randomIntBetween(0, blocksArraySize - 1); + var blocks = new Block[blocksArraySize]; + try { + aggregator.evaluate(blocks, resultBlockIndex, driverContext()); + + var block = blocks[resultBlockIndex]; + + assertThat(block.elementType(), equalTo(expectedElementType)); + + return toJavaObject(blocks[resultBlockIndex], 0); + } finally { + Releasables.close(blocks); + } + } + + private List inputChannels() { + // TODO: Randomize channels + // TODO: If surrogated, channels may change + return IntStream.range(0, testCase.getMultiRowDataValues().size()).boxed().toList(); + } + + /** + * Resolves surrogates of aggregations until a non-surrogate expression is found. + *
+ * <p>
+ *     No-op if expecting errors, as surrogates depend on correct types
+ * </p>
    + */ + private Expression resolveSurrogates(Expression expression) { + if (testCase.getExpectedTypeError() != null) { + return expression; + } + + for (int i = 0;; i++) { + assertThat("Potential infinite loop detected in surrogates", i, lessThan(10)); + + if (expression instanceof SurrogateExpression == false) { + break; + } + + var surrogate = ((SurrogateExpression) expression).surrogate(); + + if (surrogate == null) { + break; + } + + expression = surrogate; + } + + return expression; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index b13fd92d83d6c..d057dc6ff4320 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -15,7 +15,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -24,14 +23,10 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; -import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.Releasables; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.indices.CrankyCircuitBreakerService; @@ -45,7 +40,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -53,16 +47,10 @@ import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunctionTestCase; -import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.optimizer.FoldNull; import org.elasticsearch.xpack.esql.parser.ExpressionBuilder; import org.elasticsearch.xpack.esql.planner.Layout; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.versionfield.Version; -import org.hamcrest.Matcher; import org.junit.After; import org.junit.AfterClass; @@ -85,31 +73,19 @@ import java.util.Map; import 
java.util.Set; import java.util.TreeSet; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; import java.util.function.Function; import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; -import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; /** - * Base class for function tests. Tests based on this class will generally build out a single example evaluation, - * which can be automatically tested against several scenarios (null handling, concurrency, etc). + * Base class for function tests. */ public abstract class AbstractFunctionTestCase extends ESTestCase { /** @@ -152,6 +128,12 @@ public static Literal randomLiteral(DataType type) { protected TestCaseSupplier.TestCase testCase; + /** + * Converts typed test suppliers to parameterized test parameters. + *
    + * Use {@code parameterSuppliersFromTypedDataWithDefaultChecks()} instead if possible, as it automatically adds default checks. + *
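    + * For illustration, a minimal hookup sketch (hypothetical test class; the supplier bodies are elided and not part of this change):
    + * <pre>{@code
    + * @ParametersFactory
    + * public static Iterable<Object[]> parameters() {
    + *     List<TestCaseSupplier> suppliers = new ArrayList<>();
    + *     // add one TestCaseSupplier per type signature under test
    + *     return parameterSuppliersFromTypedData(suppliers);
    + * }
    + * }</pre>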
    + */ protected static Iterable parameterSuppliersFromTypedData(List suppliers) { // TODO rename this method to something more descriptive. Javadoc. And make sure all parameters are "representable" types. List parameters = new ArrayList<>(suppliers.size()); @@ -161,24 +143,6 @@ protected static Iterable parameterSuppliersFromTypedData(List - * Use if possible, as this method may get updated with new checks in the future. - *
    - * - * @param entirelyNullPreservesType See {@link #anyNullIsNull(boolean, List)} - */ - protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks( - boolean entirelyNullPreservesType, - List suppliers - ) { - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(entirelyNullPreservesType, randomizeBytesRefsOffset(suppliers))) - ); - } - /** * Build an {@link Attribute} that loads a field. */ @@ -224,6 +188,7 @@ protected final Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestC } protected final Expression buildLiteralExpression(TestCaseSupplier.TestCase testCase) { + assumeTrue("Data can't be converted to literals", testCase.canGetDataAsLiterals()); return build(testCase.getSource(), testCase.getDataAsLiterals()); } @@ -249,6 +214,29 @@ protected final Page row(List values) { return new Page(1, BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); } + /** + * Creates a page based on a list of lists, where each list represents a column. + */ + protected final Page rows(List> values) { + if (values.isEmpty()) { + return new Page(0, BlockUtils.NO_BLOCKS); + } + + var rowsCount = values.get(0).size(); + + values.stream().skip(1).forEach(l -> assertThat("All multi-row fields must have the same number of rows", l, hasSize(rowsCount))); + + var rows = new ArrayList>(); + for (int i = 0; i < rowsCount; i++) { + final int index = i; + rows.add(values.stream().map(l -> l.get(index)).toList()); + } + + var blocks = BlockUtils.fromList(TestBlockFactory.getNonBreakingInstance(), rows); + + return new Page(rowsCount, blocks); + } + /** * Hack together a layout by scanning for Fields. * Those will show up in the layout in whatever order a depth first traversal finds them. @@ -263,49 +251,7 @@ protected static void buildLayout(Layout.Builder builder, Expression e) { } } - protected final void assertResolveTypeValid(Expression expression, DataType expectedType) { - assertTrue(expression.typeResolved().resolved()); - assertThat(expression.dataType(), equalTo(expectedType)); - } - - public final void testEvaluate() { - assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - boolean readFloating = randomBoolean(); - Expression expression = readFloating ? 
buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - logger.info( - "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) - ); - Expression.TypeResolution resolution = expression.typeResolved(); - if (resolution.unresolved()) { - throw new AssertionError("expected resolved " + resolution.message()); - } - expression = new FoldNull().rule(expression); - assertThat(expression.dataType(), equalTo(testCase.expectedType())); - logger.info("Result type: " + expression.dataType()); - - Object result; - try (ExpressionEvaluator evaluator = evaluator(expression).get(driverContext())) { - try (Block block = evaluator.eval(row(testCase.getDataValues()))) { - result = toJavaObjectUnsignedLongAware(block, 0); - } - } - assertThat(result, not(equalTo(Double.NaN))); - assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); - assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; - assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } - } - - private Object toJavaObjectUnsignedLongAware(Block block, int position) { + protected Object toJavaObjectUnsignedLongAware(Block block, int position) { Object result; result = toJavaObject(block, position); if (result != null && testCase.expectedType() == DataType.UNSIGNED_LONG) { @@ -315,266 +261,44 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) { return result; } - /** - * Evaluates a {@link Block} of values, all copied from the input pattern.. - *
    - * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
    - */ - public final void testEvaluateBlockWithoutNulls() { - assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); - assertFalse("Test data is too large to fit in the memory", true); - } - } - - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between. - */ - public final void testEvaluateBlockWithNulls() { - assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); - assertFalse("Test data is too large to fit in the memory", true); - } + protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { + // TODO remove me in favor of cases containing null + assertTrue("argument " + nullBlock + " is null", value.isNull(0)); } /** - * Evaluates a {@link Block} of values, all copied from the input pattern, - * using the {@link CrankyCircuitBreakerService} which fails randomly. - *
    - * Note that this'll sometimes be a {@link Vector} of values if the - * input pattern contained only a single value. - *
    + * Modifies suppliers to generate BytesRefs with random offsets. */ - public final void testCrankyEvaluateBlockWithoutNulls() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } + protected static List randomizeBytesRefsOffset(List testCaseSuppliers) { + return testCaseSuppliers.stream().map(supplier -> new TestCaseSupplier(supplier.name(), supplier.types(), () -> { + var testCase = supplier.supplier().get(); - /** - * Evaluates a {@link Block} of values, all copied from the input pattern with - * some null values inserted between, using the {@link CrankyCircuitBreakerService} which fails randomly. - */ - public final void testCrankyEvaluateBlockWithNulls() { - assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); - try { - testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true); - } catch (CircuitBreakingException ex) { - assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); - } - } + var newData = testCase.getData().stream().map(typedData -> { + if (typedData.data() instanceof BytesRef bytesRef) { + var offset = randomIntBetween(0, 10); + var extraLength = randomIntBetween(0, 10); + var newBytesArray = randomByteArrayOfLength(bytesRef.length + offset + extraLength); - /** - * Does the function produce the same output regardless of input? - */ - protected Matcher allNullsMatcher() { - return nullValue(); - } + System.arraycopy(bytesRef.bytes, bytesRef.offset, newBytesArray, offset, bytesRef.length); - private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { - Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - int positions = between(1, 1024); - List data = testCase.getData(); - Page onePositionPage = row(testCase.getDataValues()); - Block[] manyPositionsBlocks = new Block[Math.toIntExact(data.stream().filter(d -> d.isForceLiteral() == false).count())]; - Set nullPositions = insertNulls - ? 
IntStream.range(0, positions).filter(i -> randomBoolean()).mapToObj(Integer::valueOf).collect(Collectors.toSet()) - : Set.of(); - if (nullPositions.size() == positions) { - nullPositions = Set.of(); - } - try { - int b = 0; - for (TestCaseSupplier.TypedData d : data) { - if (d.isForceLiteral()) { - continue; - } - ElementType elementType = PlannerUtils.toElementType(d.type()); - try (Block.Builder builder = elementType.newBlockBuilder(positions, inputBlockFactory)) { - for (int p = 0; p < positions; p++) { - if (nullPositions.contains(p)) { - builder.appendNull(); - } else { - builder.copyFrom(onePositionPage.getBlock(b), 0, 1); - } - } - manyPositionsBlocks[b] = builder.build(); - } - b++; - } - try ( - ExpressionEvaluator eval = evaluator(expression).get(context); - Block block = eval.eval(new Page(positions, manyPositionsBlocks)) - ) { - for (int p = 0; p < positions; p++) { - if (nullPositions.contains(p)) { - assertThat(toJavaObject(block, p), allNullsMatcher()); - continue; - } - assertThat(toJavaObjectUnsignedLongAware(block, p), testCase.getMatcher()); - } - assertThat( - "evaluates to tracked block", - block.blockFactory(), - either(sameInstance(context.blockFactory())).or(sameInstance(inputBlockFactory)) - ); - } - } finally { - Releasables.close(onePositionPage::releaseBlocks, Releasables.wrap(manyPositionsBlocks)); - } - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } - } + var newBytesRef = new BytesRef(newBytesArray, offset, bytesRef.length); - public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull - Expression expression = buildFieldExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - List simpleData = testCase.getDataValues(); - try (EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext())) { - BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); - Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); - for (int i = 0; i < orig.length; i++) { - List data = new ArrayList<>(); - Block[] blocks = new Block[orig.length]; - for (int b = 0; b < blocks.length; b++) { - if (b == i) { - blocks[b] = orig[b].elementType().newBlockBuilder(1, blockFactory).appendNull().build(); - data.add(null); - } else { - blocks[b] = orig[b]; - data.add(simpleData.get(b)); - } + return typedData.withData(newBytesRef); } - try (Block block = eval.eval(new Page(blocks))) { - assertSimpleWithNulls(data, block, i); - } - } - - // Note: the null-in-fast-null-out handling prevents any exception from being thrown, so the warnings provided in some test - // cases won't actually be registered. This isn't an issue for unary functions, but could be an issue for n-ary ones, if - // function processing of the first parameter(s) could raise an exception/warning. (But hasn't been the case so far.) - // N-ary non-MV functions dealing with one multivalue (before hitting the null parameter injected above) will now trigger - // a warning ("SV-function encountered a MV") that thus needs to be checked. 
- if (this instanceof AbstractMultivalueFunctionTestCase == false - && simpleData.stream().anyMatch(List.class::isInstance) - && testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } - } - } - - protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { - // TODO remove me in favor of cases containing null - assertTrue("argument " + nullBlock + " is null", value.isNull(0)); - } - - public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { - Expression expression = buildFieldExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - int count = 10_000; - int threads = 5; - var evalSupplier = evaluator(expression); - ExecutorService exec = Executors.newFixedThreadPool(threads); - try { - List> futures = new ArrayList<>(); - for (int i = 0; i < threads; i++) { - List simpleData = testCase.getDataValues(); - Page page = row(simpleData); - - futures.add(exec.submit(() -> { - try (EvalOperator.ExpressionEvaluator eval = evalSupplier.get(driverContext())) { - for (int c = 0; c < count; c++) { - try (Block block = eval.eval(page)) { - assertThat(toJavaObjectUnsignedLongAware(block, 0), testCase.getMatcher()); - } - } - } - })); - } - for (Future f : futures) { - f.get(); - } - } finally { - exec.shutdown(); - } - } - - public final void testEvaluatorToString() { - Expression expression = buildFieldExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - var factory = evaluator(expression); - try (ExpressionEvaluator ev = factory.get(driverContext())) { - assertThat(ev.toString(), testCase.evaluatorToString()); - } - } - - public final void testFactoryToString() { - Expression expression = buildFieldExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - var factory = evaluator(buildFieldExpression(testCase)); - assertThat(factory.toString(), testCase.evaluatorToString()); - } + return typedData; + }).toList(); - public final void testFold() { - Expression expression = buildLiteralExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } - assertFalse(expression.typeResolved().unresolved()); - Expression nullOptimized = new FoldNull().rule(expression); - assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType())); - assertTrue(nullOptimized.foldable()); - if (testCase.foldingExceptionClass() == null) { - Object result = nullOptimized.fold(); - // Decode unsigned longs into BigIntegers - if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { - result = NumericUtils.unsignedLongAsBigInteger((Long) result); - } - assertThat(result, testCase.getMatcher()); - if (testCase.getExpectedWarnings() != null) { - assertWarnings(testCase.getExpectedWarnings()); - } - } else { - Throwable t = expectThrows(testCase.foldingExceptionClass(), nullOptimized::fold); - assertThat(t.getMessage(), equalTo(testCase.foldingExceptionMessage())); - } + return new 
TestCaseSupplier.TestCase( + newData, + testCase.evaluatorToString(), + testCase.expectedType(), + testCase.getMatcher(), + testCase.getExpectedWarnings(), + testCase.getExpectedTypeError(), + testCase.foldingExceptionClass(), + testCase.foldingExceptionMessage() + ); + })).toList(); } public void testSerializationOfSimple() { @@ -625,558 +349,6 @@ public static void testFunctionInfo() { Set returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(TreeSet::new)); assertEquals(returnFromSignature, returnTypes); - - } - - /** - * Adds cases with {@code null} and asserts that the result is {@code null}. - *
    - * Note: This won't add more than a single null to any existing test case, - * just to keep the number of test cases from exploding totally. - *
    - * - * @param entirelyNullPreservesType should a test case that only contains parameters - * with the {@code null} type keep it's expected type? - * This is mostly going to be {@code true} - * except for functions that base their type entirely - * on input types like {@link Greatest} or {@link Coalesce}. - */ - protected static List anyNullIsNull(boolean entirelyNullPreservesType, List testCaseSuppliers) { - return anyNullIsNull( - testCaseSuppliers, - (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false - && nullValueDataType == DataType.NULL - && original.getData().size() == 1 ? DataType.NULL : original.expectedType(), - (nullPosition, nullData, original) -> original - ); - } - - public interface ExpectedType { - DataType expectedType(int nullPosition, DataType nullValueDataType, TestCaseSupplier.TestCase original); - } - - public interface ExpectedEvaluatorToString { - Matcher evaluatorToString(int nullPosition, TestCaseSupplier.TypedData nullData, Matcher original); - } - - /** - * Modifies suppliers to generate BytesRefs with random offsets. - */ - protected static List randomizeBytesRefsOffset(List testCaseSuppliers) { - return testCaseSuppliers.stream().map(supplier -> new TestCaseSupplier(supplier.name(), supplier.types(), () -> { - var testCase = supplier.supplier().get(); - - var newData = testCase.getData().stream().map(typedData -> { - if (typedData.data() instanceof BytesRef bytesRef) { - var offset = randomIntBetween(0, 10); - var extraLength = randomIntBetween(0, 10); - var newBytesArray = randomByteArrayOfLength(bytesRef.length + offset + extraLength); - - System.arraycopy(bytesRef.bytes, bytesRef.offset, newBytesArray, offset, bytesRef.length); - - var newBytesRef = new BytesRef(newBytesArray, offset, bytesRef.length); - var newTypedData = new TestCaseSupplier.TypedData(newBytesRef, typedData.type(), typedData.name()); - - if (typedData.isForceLiteral()) { - newTypedData.forceLiteral(); - } - - return newTypedData; - } - return typedData; - }).toList(); - - return new TestCaseSupplier.TestCase( - newData, - testCase.evaluatorToString(), - testCase.expectedType(), - testCase.getMatcher(), - testCase.getExpectedWarnings(), - testCase.getExpectedTypeError(), - testCase.foldingExceptionClass(), - testCase.foldingExceptionMessage() - ); - })).toList(); - } - - protected static List anyNullIsNull( - List testCaseSuppliers, - ExpectedType expectedType, - ExpectedEvaluatorToString evaluatorToString - ) { - typesRequired(testCaseSuppliers); - List suppliers = new ArrayList<>(testCaseSuppliers.size()); - suppliers.addAll(testCaseSuppliers); - - /* - * For each original test case, add as many copies as there were - * arguments, replacing one of the arguments with null and keeping - * the others. - * - * Also, if this was the first time we saw the signature we copy it - * *again*, replacing the argument with null, but annotating the - * argument's type as `null` explicitly. 
- */ - Set> uniqueSignatures = new HashSet<>(); - for (TestCaseSupplier original : testCaseSuppliers) { - boolean firstTimeSeenSignature = uniqueSignatures.add(original.types()); - for (int nullPosition = 0; nullPosition < original.types().size(); nullPosition++) { - int finalNullPosition = nullPosition; - suppliers.add(new TestCaseSupplier(original.name() + " null in " + nullPosition, original.types(), () -> { - TestCaseSupplier.TestCase oc = original.get(); - List data = IntStream.range(0, oc.getData().size()).mapToObj(i -> { - TestCaseSupplier.TypedData od = oc.getData().get(i); - return i == finalNullPosition ? od.forceValueToNull() : od; - }).toList(); - TestCaseSupplier.TypedData nulledData = oc.getData().get(finalNullPosition); - return new TestCaseSupplier.TestCase( - data, - evaluatorToString.evaluatorToString(finalNullPosition, nulledData, oc.evaluatorToString()), - expectedType.expectedType(finalNullPosition, nulledData.type(), oc), - nullValue(), - null, - oc.getExpectedTypeError(), - null, - null - ); - })); - - if (firstTimeSeenSignature) { - List typesWithNull = IntStream.range(0, original.types().size()) - .mapToObj(i -> i == finalNullPosition ? DataType.NULL : original.types().get(i)) - .toList(); - boolean newSignature = uniqueSignatures.add(typesWithNull); - if (newSignature) { - suppliers.add(new TestCaseSupplier(typesWithNull, () -> { - TestCaseSupplier.TestCase oc = original.get(); - List data = IntStream.range(0, oc.getData().size()) - .mapToObj(i -> i == finalNullPosition ? TestCaseSupplier.TypedData.NULL : oc.getData().get(i)) - .toList(); - return new TestCaseSupplier.TestCase( - data, - equalTo("LiteralsEvaluator[lit=null]"), - expectedType.expectedType(finalNullPosition, DataType.NULL, oc), - nullValue(), - null, - oc.getExpectedTypeError(), - null, - null - ); - })); - } - } - } - } - - return suppliers; - - } - - /** - * Adds test cases containing unsupported parameter types that assert - * that they throw type errors. - */ - protected static List errorsForCasesWithoutExamples(List testCaseSuppliers) { - return errorsForCasesWithoutExamples(testCaseSuppliers, AbstractFunctionTestCase::typeErrorMessage); - } - - protected static List errorsForCasesWithoutExamples( - List testCaseSuppliers, - TypeErrorMessageSupplier typeErrorMessageSupplier - ) { - typesRequired(testCaseSuppliers); - List suppliers = new ArrayList<>(testCaseSuppliers.size()); - suppliers.addAll(testCaseSuppliers); - - Set> valid = testCaseSuppliers.stream().map(TestCaseSupplier::types).collect(Collectors.toSet()); - List> validPerPosition = validPerPosition(valid); - - testCaseSuppliers.stream() - .map(s -> s.types().size()) - .collect(Collectors.toSet()) - .stream() - .flatMap(count -> allPermutations(count)) - .filter(types -> valid.contains(types) == false) - /* - * Skip any cases with more than one null. Our tests don't generate - * the full combinatorial explosions of all nulls - just a single null. - * Hopefully , cases will function the same as , - * cases. 
- */.filter(types -> types.stream().filter(t -> t == DataType.NULL).count() <= 1) - .map(types -> typeErrorSupplier(validPerPosition.size() != 1, validPerPosition, types, typeErrorMessageSupplier)) - .forEach(suppliers::add); - return suppliers; - } - - public static String errorMessageStringForBinaryOperators( - boolean includeOrdinal, - List> validPerPosition, - List types - ) { - try { - return typeErrorMessage(includeOrdinal, validPerPosition, types); - } catch (IllegalStateException e) { - // This means all the positional args were okay, so the expected error is from the combination - if (types.get(0).equals(DataType.UNSIGNED_LONG)) { - return "first argument of [] is [unsigned_long] and second is [" - + types.get(1).typeName() - + "]. [unsigned_long] can only be operated on together with another [unsigned_long]"; - - } - if (types.get(1).equals(DataType.UNSIGNED_LONG)) { - return "first argument of [] is [" - + types.get(0).typeName() - + "] and second is [unsigned_long]. [unsigned_long] can only be operated on together with another [unsigned_long]"; - } - return "first argument of [] is [" - + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) - + "] so second argument must also be [" - + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) - + "] but was [" - + types.get(1).typeName() - + "]"; - - } - } - - /** - * Adds test cases containing unsupported parameter types that immediately fail. - */ - protected static List failureForCasesWithoutExamples(List testCaseSuppliers) { - typesRequired(testCaseSuppliers); - List suppliers = new ArrayList<>(testCaseSuppliers.size()); - suppliers.addAll(testCaseSuppliers); - - Set> valid = testCaseSuppliers.stream().map(TestCaseSupplier::types).collect(Collectors.toSet()); - List> validPerPosition = validPerPosition(valid); - - testCaseSuppliers.stream() - .map(s -> s.types().size()) - .collect(Collectors.toSet()) - .stream() - .flatMap(count -> allPermutations(count)) - .filter(types -> valid.contains(types) == false) - .map(types -> new TestCaseSupplier("type error for " + TestCaseSupplier.nameFromTypes(types), types, () -> { - throw new IllegalStateException("must implement a case for " + types); - })) - .forEach(suppliers::add); - return suppliers; - } - - /** - * Validate that we know the types for all the test cases already created - * @param suppliers - list of suppliers before adding in the illegal type combinations - */ - private static void typesRequired(List suppliers) { - String bad = suppliers.stream().filter(s -> s.types() == null).map(s -> s.name()).collect(Collectors.joining("\n")); - if (bad.equals("") == false) { - throw new IllegalArgumentException("types required but not found for these tests:\n" + bad); - } - } - - private static List> validPerPosition(Set> valid) { - int max = valid.stream().mapToInt(List::size).max().getAsInt(); - List> result = new ArrayList<>(max); - for (int i = 0; i < max; i++) { - result.add(new HashSet<>()); - } - for (List signature : valid) { - for (int i = 0; i < signature.size(); i++) { - result.get(i).add(signature.get(i)); - } - } - return result; - } - - private static Stream> allPermutations(int argumentCount) { - if (argumentCount == 0) { - return Stream.of(List.of()); - } - if (argumentCount > 3) { - throw new IllegalArgumentException("would generate too many combinations"); - } - Stream> stream = representable().map(t -> List.of(t)); - for (int i = 1; i < argumentCount; i++) { - stream = stream.flatMap(types -> representable().map(t -> append(types, t))); - } - 
return stream; - } - - private static List append(List orig, DataType extra) { - List longer = new ArrayList<>(orig.size() + 1); - longer.addAll(orig); - longer.add(extra); - return longer; - } - - @FunctionalInterface - protected interface TypeErrorMessageSupplier { - String apply(boolean includeOrdinal, List> validPerPosition, List types); - } - - protected static TestCaseSupplier typeErrorSupplier( - boolean includeOrdinal, - List> validPerPosition, - List types - ) { - return typeErrorSupplier(includeOrdinal, validPerPosition, types, AbstractFunctionTestCase::typeErrorMessage); - } - - /** - * Build a test case that asserts that the combination of parameter types is an error. - */ - protected static TestCaseSupplier typeErrorSupplier( - boolean includeOrdinal, - List> validPerPosition, - List types, - TypeErrorMessageSupplier errorMessageSupplier - ) { - return new TestCaseSupplier( - "type error for " + TestCaseSupplier.nameFromTypes(types), - types, - () -> TestCaseSupplier.TestCase.typeError( - types.stream().map(type -> new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, type.typeName())).toList(), - errorMessageSupplier.apply(includeOrdinal, validPerPosition, types) - ) - ); - } - - /** - * Build the expected error message for an invalid type signature. - */ - protected static String typeErrorMessage(boolean includeOrdinal, List> validPerPosition, List types) { - int badArgPosition = -1; - for (int i = 0; i < types.size(); i++) { - if (validPerPosition.get(i).contains(types.get(i)) == false) { - badArgPosition = i; - break; - } - } - if (badArgPosition == -1) { - throw new IllegalStateException( - "Can't generate error message for these types, you probably need a custom error message function" - ); - } - String ordinal = includeOrdinal ? 
TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; - String expectedType = expectedType(validPerPosition.get(badArgPosition)); - String name = types.get(badArgPosition).typeName(); - return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; - } - - private static final Map, String> NAMED_EXPECTED_TYPES = Map.ofEntries( - Map.entry( - Set.of(DataType.DATE_PERIOD, DataType.DOUBLE, DataType.INTEGER, DataType.LONG, DataType.TIME_DURATION, DataType.NULL), - "numeric, date_period or time_duration" - ), - Map.entry(Set.of(DataType.DATETIME, DataType.NULL), "datetime"), - Map.entry(Set.of(DataType.DOUBLE, DataType.NULL), "double"), - Map.entry(Set.of(DataType.INTEGER, DataType.NULL), "integer"), - Map.entry(Set.of(DataType.IP, DataType.NULL), "ip"), - Map.entry(Set.of(DataType.LONG, DataType.INTEGER, DataType.UNSIGNED_LONG, DataType.DOUBLE, DataType.NULL), "numeric"), - Map.entry(Set.of(DataType.LONG, DataType.INTEGER, DataType.UNSIGNED_LONG, DataType.DOUBLE), "numeric"), - Map.entry(Set.of(DataType.KEYWORD, DataType.TEXT, DataType.VERSION, DataType.NULL), "string or version"), - Map.entry(Set.of(DataType.KEYWORD, DataType.TEXT, DataType.NULL), "string"), - Map.entry(Set.of(DataType.IP, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "ip or string"), - Map.entry(Set.copyOf(Arrays.asList(representableTypes())), "representable"), - Map.entry(Set.copyOf(Arrays.asList(representableNonSpatialTypes())), "representableNonSpatial"), - Map.entry( - Set.of( - DataType.BOOLEAN, - DataType.DOUBLE, - DataType.INTEGER, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.NULL - ), - "boolean or numeric or string" - ), - Map.entry( - Set.of( - DataType.DATETIME, - DataType.DOUBLE, - DataType.INTEGER, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.NULL - ), - "datetime or numeric or string" - ), - // What Add accepts - Map.entry( - Set.of( - DataType.DATE_PERIOD, - DataType.DATETIME, - DataType.DOUBLE, - DataType.INTEGER, - DataType.LONG, - DataType.NULL, - DataType.TIME_DURATION, - DataType.UNSIGNED_LONG - ), - "datetime or numeric" - ), - Map.entry( - Set.of( - DataType.BOOLEAN, - DataType.DATETIME, - DataType.DOUBLE, - DataType.INTEGER, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.NULL - ), - "boolean or datetime or numeric or string" - ), - // to_int - Map.entry( - Set.of( - DataType.BOOLEAN, - DataType.COUNTER_INTEGER, - DataType.DATETIME, - DataType.DOUBLE, - DataType.INTEGER, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.NULL - ), - "boolean or counter_integer or datetime or numeric or string" - ), - // to_long - Map.entry( - Set.of( - DataType.BOOLEAN, - DataType.COUNTER_INTEGER, - DataType.COUNTER_LONG, - DataType.DATETIME, - DataType.DOUBLE, - DataType.INTEGER, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.NULL - ), - "boolean or counter_integer or counter_long or datetime or numeric or string" - ), - // to_double - Map.entry( - Set.of( - DataType.BOOLEAN, - DataType.COUNTER_DOUBLE, - DataType.COUNTER_INTEGER, - DataType.COUNTER_LONG, - DataType.DATETIME, - DataType.DOUBLE, - DataType.INTEGER, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.NULL - ), - "boolean or counter_double or counter_integer or counter_long or datetime or numeric or 
string" - ), - Map.entry( - Set.of( - DataType.BOOLEAN, - DataType.CARTESIAN_POINT, - DataType.DATETIME, - DataType.DOUBLE, - DataType.GEO_POINT, - DataType.INTEGER, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.NULL - ), - "boolean or cartesian_point or datetime or geo_point or numeric or string" - ), - Map.entry( - Set.of( - DataType.DATETIME, - DataType.DOUBLE, - DataType.INTEGER, - DataType.IP, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.VERSION, - DataType.NULL - ), - "datetime, double, integer, ip, keyword, long, text, unsigned_long or version" - ), - Map.entry( - Set.of( - DataType.BOOLEAN, - DataType.DATETIME, - DataType.DOUBLE, - DataType.GEO_POINT, - DataType.GEO_SHAPE, - DataType.INTEGER, - DataType.IP, - DataType.KEYWORD, - DataType.LONG, - DataType.TEXT, - DataType.UNSIGNED_LONG, - DataType.VERSION, - DataType.NULL - ), - "cartesian_point or datetime or geo_point or numeric or string" - ), - Map.entry(Set.of(DataType.GEO_POINT, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "geo_point or string"), - Map.entry(Set.of(DataType.CARTESIAN_POINT, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "cartesian_point or string"), - Map.entry( - Set.of(DataType.GEO_POINT, DataType.GEO_SHAPE, DataType.KEYWORD, DataType.TEXT, DataType.NULL), - "geo_point or geo_shape or string" - ), - Map.entry( - Set.of(DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE, DataType.KEYWORD, DataType.TEXT, DataType.NULL), - "cartesian_point or cartesian_shape or string" - ), - Map.entry(Set.of(DataType.GEO_POINT, DataType.CARTESIAN_POINT, DataType.NULL), "geo_point or cartesian_point"), - Map.entry(Set.of(DataType.DATE_PERIOD, DataType.TIME_DURATION, DataType.NULL), "dateperiod or timeduration") - ); - - // TODO: generate this message dynamically, a la AbstractConvertFunction#supportedTypesNames()? - private static String expectedType(Set validTypes) { - String named = NAMED_EXPECTED_TYPES.get(validTypes); - if (named == null) { - /* - * Note for anyone who's test lands here - it's likely that you - * don't have a test case covering explicit `null` arguments in - * this position. Generally you can get that with anyNullIsNull. - */ - throw new UnsupportedOperationException( - "can't guess expected types for " + validTypes.stream().sorted(Comparator.comparing(t -> t.typeName())).toList() - ); - } - return named; - } - - protected static Stream representable() { - return DataType.types().stream().filter(EsqlDataTypes::isRepresentable); - } - - protected static DataType[] representableTypes() { - return representable().toArray(DataType[]::new); - } - - protected static Stream representableNonSpatial() { - return representable().filter(t -> isSpatial(t) == false); - } - - protected static DataType[] representableNonSpatialTypes() { - return representableNonSpatial().toArray(DataType[]::new); } protected final void assertTypeResolutionFailure(Expression expression) { @@ -1468,7 +640,7 @@ private static void renderKibanaFunctionDefinition( "comment", "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it." ); - builder.field("type", "eval"); // TODO aggs in here too + builder.field("type", isAggregation() ? 
"agg" : "eval"); builder.field("name", name); builder.field("description", removeAsciidocLinks(info.description())); if (Strings.isNullOrEmpty(info.note()) == false) { @@ -1661,4 +833,14 @@ static Version randomVersion() { protected static DataType[] strings() { return DataType.types().stream().filter(DataType::isString).toArray(DataType[]::new); } + + /** + * Returns true if the current test case is for an aggregation function. + *
    + * This method requires reflection, as it's called from a static context (@AfterClass documentation rendering). + *
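    + * For example, the Kibana definition rendering in this change selects the function type with it:
    + * <pre>{@code
    + * builder.field("type", isAggregation() ? "agg" : "eval");
    + * }</pre>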
    + */ + private static boolean isAggregation() { + return AbstractAggregationTestCase.class.isAssignableFrom(getTestClass()); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java new file mode 100644 index 0000000000000..1aa90d367099a --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -0,0 +1,884 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.xpack.esql.TestBlockFactory; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.NumericUtils; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.optimizer.FoldNull; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.hamcrest.Matcher; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; + +/** + * Base class for scalar function tests. Tests based on this class will generally build out a single example evaluation, + * which can be automatically tested against several scenarios (null handling, concurrency, etc). 
+ */ +public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTestCase { + + /** + * Converts a list of test cases into a list of parameter suppliers. + * Also, adds a default set of extra test cases. + *
    + * Use this method if possible, as it may get updated with new checks in the future. + *
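    + * Per the implementation below, the default checks are: randomized BytesRef offsets, null-argument cases, and type-error cases for signatures without examples. A sketch of a typical call site (the suppliers list is hypothetical):
    + * <pre>{@code
    + * return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
    + * }</pre>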
    + * + * @param entirelyNullPreservesType See {@link #anyNullIsNull(boolean, List)} + */ + protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks( + boolean entirelyNullPreservesType, + List suppliers + ) { + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(entirelyNullPreservesType, randomizeBytesRefsOffset(suppliers))) + ); + } + + public final void testEvaluate() { + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + boolean readFloating = randomBoolean(); + Expression expression = readFloating ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); + logger.info( + "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) + ); + Expression.TypeResolution resolution = expression.typeResolved(); + if (resolution.unresolved()) { + throw new AssertionError("expected resolved " + resolution.message()); + } + expression = new FoldNull().rule(expression); + assertThat(expression.dataType(), equalTo(testCase.expectedType())); + logger.info("Result type: " + expression.dataType()); + + Object result; + try (ExpressionEvaluator evaluator = evaluator(expression).get(driverContext())) { + try (Block block = evaluator.eval(row(testCase.getDataValues()))) { + result = toJavaObjectUnsignedLongAware(block, 0); + } + } + assertThat(result, not(equalTo(Double.NaN))); + assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); + assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); + assertThat(result, testCase.getMatcher()); + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + + /** + * Evaluates a {@link Block} of values, all copied from the input pattern.. + *
    + * Note that this'll sometimes be a {@link Vector} of values if the + * input pattern contained only a single value. + *
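    + * Conceptually, the copy step in the shared implementation repeats the single input row across many positions:
    + * <pre>{@code
    + * for (int p = 0; p < positions; p++) {
    + *     builder.copyFrom(onePositionPage.getBlock(b), 0, 1); // copy row 0 into position p
    + * }
    + * }</pre>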
    + */ + public final void testEvaluateBlockWithoutNulls() { + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + fail("Test data is too large to fit in the memory"); + } + } + + /** + * Evaluates a {@link Block} of values, all copied from the input pattern with + * some null values inserted between. + */ + public final void testEvaluateBlockWithNulls() { + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + fail("Test data is too large to fit in the memory"); + } + } + + /** + * Evaluates a {@link Block} of values, all copied from the input pattern, + * using the {@link CrankyCircuitBreakerService} which fails randomly. + *
    + * Note that this'll sometimes be a {@link Vector} of values if the + * input pattern contained only a single value. + *
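    + * A random trip is only acceptable when it carries the cranky breaker's own message, which the body below asserts:
    + * <pre>{@code
    + * catch (CircuitBreakingException ex) {
    + *     assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
    + * }
    + * }</pre>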
    + */ + public final void testCrankyEvaluateBlockWithoutNulls() { + assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), false); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + /** + * Evaluates a {@link Block} of values, all copied from the input pattern with + * some null values inserted between, using the {@link CrankyCircuitBreakerService} which fails randomly. + */ + public final void testCrankyEvaluateBlockWithNulls() { + assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), crankyContext(), true); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + protected Matcher allNullsMatcher() { + return nullValue(); + } + + private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); + int positions = between(1, 1024); + List data = testCase.getData(); + Page onePositionPage = row(testCase.getDataValues()); + Block[] manyPositionsBlocks = new Block[Math.toIntExact(data.stream().filter(d -> d.isForceLiteral() == false).count())]; + Set nullPositions = insertNulls + ? 
IntStream.range(0, positions).filter(i -> randomBoolean()).mapToObj(Integer::valueOf).collect(Collectors.toSet()) + : Set.of(); + if (nullPositions.size() == positions) { + nullPositions = Set.of(); + } + try { + int b = 0; + for (TestCaseSupplier.TypedData d : data) { + if (d.isForceLiteral()) { + continue; + } + ElementType elementType = PlannerUtils.toElementType(d.type()); + try (Block.Builder builder = elementType.newBlockBuilder(positions, inputBlockFactory)) { + for (int p = 0; p < positions; p++) { + if (nullPositions.contains(p)) { + builder.appendNull(); + } else { + builder.copyFrom(onePositionPage.getBlock(b), 0, 1); + } + } + manyPositionsBlocks[b] = builder.build(); + } + b++; + } + try ( + ExpressionEvaluator eval = evaluator(expression).get(context); + Block block = eval.eval(new Page(positions, manyPositionsBlocks)) + ) { + for (int p = 0; p < positions; p++) { + if (nullPositions.contains(p)) { + assertThat(toJavaObject(block, p), allNullsMatcher()); + continue; + } + assertThat(toJavaObjectUnsignedLongAware(block, p), testCase.getMatcher()); + } + assertThat( + "evaluates to tracked block", + block.blockFactory(), + either(sameInstance(context.blockFactory())).or(sameInstance(inputBlockFactory)) + ); + } + } finally { + Releasables.close(onePositionPage::releaseBlocks, Releasables.wrap(manyPositionsBlocks)); + } + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + + public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + List simpleData = testCase.getDataValues(); + try (EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext())) { + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); + for (int i = 0; i < orig.length; i++) { + List data = new ArrayList<>(); + Block[] blocks = new Block[orig.length]; + for (int b = 0; b < blocks.length; b++) { + if (b == i) { + blocks[b] = orig[b].elementType().newBlockBuilder(1, blockFactory).appendNull().build(); + data.add(null); + } else { + blocks[b] = orig[b]; + data.add(simpleData.get(b)); + } + } + try (Block block = eval.eval(new Page(blocks))) { + assertSimpleWithNulls(data, block, i); + } + } + + // Note: the null-in-fast-null-out handling prevents any exception from being thrown, so the warnings provided in some test + // cases won't actually be registered. This isn't an issue for unary functions, but could be an issue for n-ary ones, if + // function processing of the first parameter(s) could raise an exception/warning. (But hasn't been the case so far.) + // N-ary non-MV functions dealing with one multivalue (before hitting the null parameter injected above) will now trigger + // a warning ("SV-function encountered a MV") that thus needs to be checked. 
+ if (this instanceof AbstractMultivalueFunctionTestCase == false + && simpleData.stream().anyMatch(List.class::isInstance) + && testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + } + + public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); + int count = 10_000; + int threads = 5; + var evalSupplier = evaluator(expression); + ExecutorService exec = Executors.newFixedThreadPool(threads); + try { + List> futures = new ArrayList<>(); + for (int i = 0; i < threads; i++) { + List simpleData = testCase.getDataValues(); + Page page = row(simpleData); + + futures.add(exec.submit(() -> { + try (EvalOperator.ExpressionEvaluator eval = evalSupplier.get(driverContext())) { + for (int c = 0; c < count; c++) { + try (Block block = eval.eval(page)) { + assertThat(toJavaObjectUnsignedLongAware(block, 0), testCase.getMatcher()); + } + } + } + })); + } + for (Future f : futures) { + f.get(); + } + } finally { + exec.shutdown(); + } + } + + public final void testEvaluatorToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + var factory = evaluator(expression); + try (ExpressionEvaluator ev = factory.get(driverContext())) { + assertThat(ev.toString(), testCase.evaluatorToString()); + } + } + + public final void testFactoryToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); + var factory = evaluator(buildFieldExpression(testCase)); + assertThat(factory.toString(), testCase.evaluatorToString()); + } + + public final void testFold() { + Expression expression = buildLiteralExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } + assertFalse(expression.typeResolved().unresolved()); + Expression nullOptimized = new FoldNull().rule(expression); + assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType())); + assertTrue(nullOptimized.foldable()); + if (testCase.foldingExceptionClass() == null) { + Object result = nullOptimized.fold(); + // Decode unsigned longs into BigIntegers + if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { + result = NumericUtils.unsignedLongAsBigInteger((Long) result); + } + assertThat(result, testCase.getMatcher()); + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } else { + Throwable t = expectThrows(testCase.foldingExceptionClass(), nullOptimized::fold); + assertThat(t.getMessage(), equalTo(testCase.foldingExceptionMessage())); + } + } + + /** + * Adds cases with {@code null} and asserts that the result is {@code null}. + *
    + * Note: This won't add more than a single null to any existing test case, + * just to keep the number of test cases from exploding. + *
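    + * For example (a hypothetical signature), from an original {@code (keyword, integer)} case this derives:
    + * <pre>{@code
    + * // original:    (keyword, integer)          -> expected value
    + * // null value:  (null keyword, integer)     -> null
    + * // null value:  (keyword, null integer)     -> null
    + * // null *type*: (null, integer), (keyword, null)  // added once per signature
    + * }</pre>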
    + * + * @param entirelyNullPreservesType should a test case that only contains parameters + * with the {@code null} type keep it's expected type? + * This is mostly going to be {@code true} + * except for functions that base their type entirely + * on input types like {@link Greatest} or {@link Coalesce}. + */ + protected static List anyNullIsNull(boolean entirelyNullPreservesType, List testCaseSuppliers) { + return anyNullIsNull( + testCaseSuppliers, + (nullPosition, nullValueDataType, original) -> entirelyNullPreservesType == false + && nullValueDataType == DataType.NULL + && original.getData().size() == 1 ? DataType.NULL : original.expectedType(), + (nullPosition, nullData, original) -> original + ); + } + + public interface ExpectedType { + DataType expectedType(int nullPosition, DataType nullValueDataType, TestCaseSupplier.TestCase original); + } + + public interface ExpectedEvaluatorToString { + Matcher evaluatorToString(int nullPosition, TestCaseSupplier.TypedData nullData, Matcher original); + } + + protected static List anyNullIsNull( + List testCaseSuppliers, + ExpectedType expectedType, + ExpectedEvaluatorToString evaluatorToString + ) { + typesRequired(testCaseSuppliers); + List suppliers = new ArrayList<>(testCaseSuppliers.size()); + suppliers.addAll(testCaseSuppliers); + + /* + * For each original test case, add as many copies as there were + * arguments, replacing one of the arguments with null and keeping + * the others. + * + * Also, if this was the first time we saw the signature we copy it + * *again*, replacing the argument with null, but annotating the + * argument's type as `null` explicitly. + */ + Set> uniqueSignatures = new HashSet<>(); + for (TestCaseSupplier original : testCaseSuppliers) { + boolean firstTimeSeenSignature = uniqueSignatures.add(original.types()); + for (int nullPosition = 0; nullPosition < original.types().size(); nullPosition++) { + int finalNullPosition = nullPosition; + suppliers.add(new TestCaseSupplier(original.name() + " null in " + nullPosition, original.types(), () -> { + TestCaseSupplier.TestCase oc = original.get(); + List data = IntStream.range(0, oc.getData().size()).mapToObj(i -> { + TestCaseSupplier.TypedData od = oc.getData().get(i); + return i == finalNullPosition ? od.withData(null) : od; + }).toList(); + TestCaseSupplier.TypedData nulledData = oc.getData().get(finalNullPosition); + return new TestCaseSupplier.TestCase( + data, + evaluatorToString.evaluatorToString(finalNullPosition, nulledData, oc.evaluatorToString()), + expectedType.expectedType(finalNullPosition, nulledData.type(), oc), + nullValue(), + null, + oc.getExpectedTypeError(), + null, + null + ); + })); + + if (firstTimeSeenSignature) { + List typesWithNull = IntStream.range(0, original.types().size()) + .mapToObj(i -> i == finalNullPosition ? DataType.NULL : original.types().get(i)) + .toList(); + boolean newSignature = uniqueSignatures.add(typesWithNull); + if (newSignature) { + suppliers.add(new TestCaseSupplier(typesWithNull, () -> { + TestCaseSupplier.TestCase oc = original.get(); + List data = IntStream.range(0, oc.getData().size()) + .mapToObj(i -> i == finalNullPosition ? 
TestCaseSupplier.TypedData.NULL : oc.getData().get(i)) + .toList(); + return new TestCaseSupplier.TestCase( + data, + equalTo("LiteralsEvaluator[lit=null]"), + expectedType.expectedType(finalNullPosition, DataType.NULL, oc), + nullValue(), + null, + oc.getExpectedTypeError(), + null, + null + ); + })); + } + } + } + } + + return suppliers; + + } + + /** + * Adds test cases containing unsupported parameter types that assert + * that they throw type errors. + */ + protected static List errorsForCasesWithoutExamples(List testCaseSuppliers) { + return errorsForCasesWithoutExamples(testCaseSuppliers, AbstractScalarFunctionTestCase::typeErrorMessage); + } + + protected static List errorsForCasesWithoutExamples( + List testCaseSuppliers, + TypeErrorMessageSupplier typeErrorMessageSupplier + ) { + typesRequired(testCaseSuppliers); + List suppliers = new ArrayList<>(testCaseSuppliers.size()); + suppliers.addAll(testCaseSuppliers); + + Set> valid = testCaseSuppliers.stream().map(TestCaseSupplier::types).collect(Collectors.toSet()); + List> validPerPosition = validPerPosition(valid); + + testCaseSuppliers.stream() + .map(s -> s.types().size()) + .collect(Collectors.toSet()) + .stream() + .flatMap(count -> allPermutations(count)) + .filter(types -> valid.contains(types) == false) + /* + * Skip any cases with more than one null. Our tests don't generate + * the full combinatorial explosions of all nulls - just a single null. + * Hopefully , cases will function the same as , + * cases. + */.filter(types -> types.stream().filter(t -> t == DataType.NULL).count() <= 1) + .map(types -> typeErrorSupplier(validPerPosition.size() != 1, validPerPosition, types, typeErrorMessageSupplier)) + .forEach(suppliers::add); + return suppliers; + } + + public static String errorMessageStringForBinaryOperators( + boolean includeOrdinal, + List> validPerPosition, + List types + ) { + try { + return typeErrorMessage(includeOrdinal, validPerPosition, types); + } catch (IllegalStateException e) { + // This means all the positional args were okay, so the expected error is from the combination + if (types.get(0).equals(DataType.UNSIGNED_LONG)) { + return "first argument of [] is [unsigned_long] and second is [" + + types.get(1).typeName() + + "]. [unsigned_long] can only be operated on together with another [unsigned_long]"; + + } + if (types.get(1).equals(DataType.UNSIGNED_LONG)) { + return "first argument of [] is [" + + types.get(0).typeName() + + "] and second is [unsigned_long]. [unsigned_long] can only be operated on together with another [unsigned_long]"; + } + return "first argument of [] is [" + + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) + + "] so second argument must also be [" + + (types.get(0).isNumeric() ? "numeric" : types.get(0).typeName()) + + "] but was [" + + types.get(1).typeName() + + "]"; + + } + } + + /** + * Adds test cases containing unsupported parameter types that immediately fail. 
+    /**
+     * Adds test cases containing unsupported parameter types that immediately fail.
+     */
+    protected static List<TestCaseSupplier> failureForCasesWithoutExamples(List<TestCaseSupplier> testCaseSuppliers) {
+        typesRequired(testCaseSuppliers);
+        List<TestCaseSupplier> suppliers = new ArrayList<>(testCaseSuppliers.size());
+        suppliers.addAll(testCaseSuppliers);
+
+        Set<List<DataType>> valid = testCaseSuppliers.stream().map(TestCaseSupplier::types).collect(Collectors.toSet());
+
+        testCaseSuppliers.stream()
+            .map(s -> s.types().size())
+            .collect(Collectors.toSet())
+            .stream()
+            .flatMap(count -> allPermutations(count))
+            .filter(types -> valid.contains(types) == false)
+            .map(types -> new TestCaseSupplier("type error for " + TestCaseSupplier.nameFromTypes(types), types, () -> {
+                throw new IllegalStateException("must implement a case for " + types);
+            }))
+            .forEach(suppliers::add);
+        return suppliers;
+    }
+
+    /**
+     * Validate that we know the types for all the test cases already created
+     * @param suppliers - list of suppliers before adding in the illegal type combinations
+     */
+    private static void typesRequired(List<TestCaseSupplier> suppliers) {
+        String bad = suppliers.stream().filter(s -> s.types() == null).map(s -> s.name()).collect(Collectors.joining("\n"));
+        if (bad.equals("") == false) {
+            throw new IllegalArgumentException("types required but not found for these tests:\n" + bad);
+        }
+    }
+
+    private static List<Set<DataType>> validPerPosition(Set<List<DataType>> valid) {
+        int max = valid.stream().mapToInt(List::size).max().getAsInt();
+        List<Set<DataType>> result = new ArrayList<>(max);
+        for (int i = 0; i < max; i++) {
+            result.add(new HashSet<>());
+        }
+        for (List<DataType> signature : valid) {
+            for (int i = 0; i < signature.size(); i++) {
+                result.get(i).add(signature.get(i));
+            }
+        }
+        return result;
+    }
+
+    private static Stream<List<DataType>> allPermutations(int argumentCount) {
+        if (argumentCount == 0) {
+            return Stream.of(List.of());
+        }
+        if (argumentCount > 3) {
+            throw new IllegalArgumentException("would generate too many combinations");
+        }
+        Stream<List<DataType>> stream = representable().map(t -> List.of(t));
+        for (int i = 1; i < argumentCount; i++) {
+            stream = stream.flatMap(types -> representable().map(t -> append(types, t)));
+        }
+        return stream;
+    }
+
+    private static List<DataType> append(List<DataType> orig, DataType extra) {
+        List<DataType> longer = new ArrayList<>(orig.size() + 1);
+        longer.addAll(orig);
+        longer.add(extra);
+        return longer;
+    }
+
+    @FunctionalInterface
+    protected interface TypeErrorMessageSupplier {
+        String apply(boolean includeOrdinal, List<Set<DataType>> validPerPosition, List<DataType> types);
+    }
+
+    protected static TestCaseSupplier typeErrorSupplier(
+        boolean includeOrdinal,
+        List<Set<DataType>> validPerPosition,
+        List<DataType> types
+    ) {
+        return typeErrorSupplier(includeOrdinal, validPerPosition, types, AbstractScalarFunctionTestCase::typeErrorMessage);
+    }
+
+    /**
+     * Build a test case that asserts that the combination of parameter types is an error.
+     */
+    protected static TestCaseSupplier typeErrorSupplier(
+        boolean includeOrdinal,
+        List<Set<DataType>> validPerPosition,
+        List<DataType> types,
+        TypeErrorMessageSupplier errorMessageSupplier
+    ) {
+        return new TestCaseSupplier(
+            "type error for " + TestCaseSupplier.nameFromTypes(types),
+            types,
+            () -> TestCaseSupplier.TestCase.typeError(
+                types.stream().map(type -> new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, type.typeName())).toList(),
+                errorMessageSupplier.apply(includeOrdinal, validPerPosition, types)
+            )
+        );
+    }
+
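A worked example of the message format the builder below produces, for a hypothetical one-argument function that accepts only numerics (the ordinal is skipped because `includeOrdinal` is false for single-position signatures):

```
List<Set<DataType>> validPerPosition = List.of(
    Set.of(DataType.INTEGER, DataType.LONG, DataType.UNSIGNED_LONG, DataType.DOUBLE, DataType.NULL)
);
// The NAMED_EXPECTED_TYPES table further down names this set "numeric", so:
String message = typeErrorMessage(false, validPerPosition, List.of(DataType.KEYWORD));
// -> "argument of [] must be [numeric], found value [keyword] type [keyword]"
```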
+    /**
+     * Build the expected error message for an invalid type signature.
+     */
+    protected static String typeErrorMessage(boolean includeOrdinal, List<Set<DataType>> validPerPosition, List<DataType> types) {
+        int badArgPosition = -1;
+        for (int i = 0; i < types.size(); i++) {
+            if (validPerPosition.get(i).contains(types.get(i)) == false) {
+                badArgPosition = i;
+                break;
+            }
+        }
+        if (badArgPosition == -1) {
+            throw new IllegalStateException(
+                "Can't generate error message for these types, you probably need a custom error message function"
+            );
+        }
+        String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : "";
+        String expectedType = expectedType(validPerPosition.get(badArgPosition));
+        String name = types.get(badArgPosition).typeName();
+        return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]";
+    }
+
+    private static final Map<Set<DataType>, String> NAMED_EXPECTED_TYPES = Map.ofEntries(
+        Map.entry(
+            Set.of(DataType.DATE_PERIOD, DataType.DOUBLE, DataType.INTEGER, DataType.LONG, DataType.TIME_DURATION, DataType.NULL),
+            "numeric, date_period or time_duration"
+        ),
+        Map.entry(Set.of(DataType.DATETIME, DataType.NULL), "datetime"),
+        Map.entry(Set.of(DataType.DOUBLE, DataType.NULL), "double"),
+        Map.entry(Set.of(DataType.INTEGER, DataType.NULL), "integer"),
+        Map.entry(Set.of(DataType.IP, DataType.NULL), "ip"),
+        Map.entry(Set.of(DataType.LONG, DataType.INTEGER, DataType.UNSIGNED_LONG, DataType.DOUBLE, DataType.NULL), "numeric"),
+        Map.entry(Set.of(DataType.LONG, DataType.INTEGER, DataType.UNSIGNED_LONG, DataType.DOUBLE), "numeric"),
+        Map.entry(Set.of(DataType.KEYWORD, DataType.TEXT, DataType.VERSION, DataType.NULL), "string or version"),
+        Map.entry(Set.of(DataType.KEYWORD, DataType.TEXT, DataType.NULL), "string"),
+        Map.entry(Set.of(DataType.IP, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "ip or string"),
+        Map.entry(Set.copyOf(Arrays.asList(representableTypes())), "representable"),
+        Map.entry(Set.copyOf(Arrays.asList(representableNonSpatialTypes())), "representableNonSpatial"),
+        Map.entry(
+            Set.of(
+                DataType.BOOLEAN,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.NULL
+            ),
+            "boolean or numeric or string"
+        ),
+        Map.entry(
+            Set.of(
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.NULL
+            ),
+            "datetime or numeric or string"
+        ),
+        // What Add accepts
+        Map.entry(
+            Set.of(
+                DataType.DATE_PERIOD,
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.LONG,
+                DataType.NULL,
+                DataType.TIME_DURATION,
+                DataType.UNSIGNED_LONG
+            ),
+            "datetime or numeric"
+        ),
+        Map.entry(
+            Set.of(
+                DataType.BOOLEAN,
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.NULL
+            ),
+            "boolean or datetime or numeric or string"
+        ),
+        // to_int
+        Map.entry(
+            Set.of(
+                DataType.BOOLEAN,
+                DataType.COUNTER_INTEGER,
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.NULL
+            ),
+            "boolean or counter_integer or datetime or numeric or string"
+        ),
+        // to_long
+        Map.entry(
+            Set.of(
+                DataType.BOOLEAN,
+                DataType.COUNTER_INTEGER,
+                DataType.COUNTER_LONG,
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.NULL
+            ),
+            "boolean or counter_integer or counter_long or datetime or numeric or string"
+        ),
+        // to_double
+        Map.entry(
+            Set.of(
+                DataType.BOOLEAN,
+                DataType.COUNTER_DOUBLE,
+                DataType.COUNTER_INTEGER,
+                DataType.COUNTER_LONG,
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.NULL
+            ),
+            "boolean or counter_double or counter_integer or counter_long or datetime or numeric or string"
+        ),
+        Map.entry(
+            Set.of(
+                DataType.BOOLEAN,
+                DataType.CARTESIAN_POINT,
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.GEO_POINT,
+                DataType.INTEGER,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.NULL
+            ),
+            "boolean or cartesian_point or datetime or geo_point or numeric or string"
+        ),
+        Map.entry(
+            Set.of(
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.INTEGER,
+                DataType.IP,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.VERSION,
+                DataType.NULL
+            ),
+            "datetime, double, integer, ip, keyword, long, text, unsigned_long or version"
+        ),
+        Map.entry(
+            Set.of(
+                DataType.BOOLEAN,
+                DataType.DATETIME,
+                DataType.DOUBLE,
+                DataType.GEO_POINT,
+                DataType.GEO_SHAPE,
+                DataType.INTEGER,
+                DataType.IP,
+                DataType.KEYWORD,
+                DataType.LONG,
+                DataType.TEXT,
+                DataType.UNSIGNED_LONG,
+                DataType.VERSION,
+                DataType.NULL
+            ),
+            "cartesian_point or datetime or geo_point or numeric or string"
+        ),
+        Map.entry(Set.of(DataType.GEO_POINT, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "geo_point or string"),
+        Map.entry(Set.of(DataType.CARTESIAN_POINT, DataType.KEYWORD, DataType.TEXT, DataType.NULL), "cartesian_point or string"),
+        Map.entry(
+            Set.of(DataType.GEO_POINT, DataType.GEO_SHAPE, DataType.KEYWORD, DataType.TEXT, DataType.NULL),
+            "geo_point or geo_shape or string"
+        ),
+        Map.entry(
+            Set.of(DataType.CARTESIAN_POINT, DataType.CARTESIAN_SHAPE, DataType.KEYWORD, DataType.TEXT, DataType.NULL),
+            "cartesian_point or cartesian_shape or string"
+        ),
+        Map.entry(Set.of(DataType.GEO_POINT, DataType.CARTESIAN_POINT, DataType.NULL), "geo_point or cartesian_point"),
+        Map.entry(Set.of(DataType.DATE_PERIOD, DataType.TIME_DURATION, DataType.NULL), "dateperiod or timeduration")
+    );
+
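If a new function's set of valid inputs has no entry in this table, `expectedType` just below refuses to guess and the test fails with an UnsupportedOperationException; the usual fix is adding one more entry. A hypothetical example:

```
// The right-hand string is what lands in "...must be [boolean]..." messages.
Map.entry(Set.of(DataType.BOOLEAN, DataType.NULL), "boolean")
```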
+    // TODO: generate this message dynamically, a la AbstractConvertFunction#supportedTypesNames()?
+    private static String expectedType(Set<DataType> validTypes) {
+        String named = NAMED_EXPECTED_TYPES.get(validTypes);
+        if (named == null) {
+            /*
+             * Note for anyone whose test lands here - it's likely that you
+             * don't have a test case covering explicit `null` arguments in
+             * this position. Generally you can get that with anyNullIsNull.
+             */
+            throw new UnsupportedOperationException(
+                "can't guess expected types for " + validTypes.stream().sorted(Comparator.comparing(t -> t.typeName())).toList()
+            );
+        }
+        return named;
+    }
+
+    protected static Stream<DataType> representable() {
+        return DataType.types().stream().filter(EsqlDataTypes::isRepresentable);
+    }
+
+    protected static DataType[] representableTypes() {
+        return representable().toArray(DataType[]::new);
+    }
+
+    protected static Stream<DataType> representableNonSpatial() {
+        return representable().filter(t -> isSpatial(t) == false);
+    }
+
+    protected static DataType[] representableNonSpatialTypes() {
+        return representableNonSpatial().toArray(DataType[]::new);
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java
index b4a5d3bdc2b92..9807cb5365e54 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/FunctionName.java
@@ -13,7 +13,7 @@ import java.lang.annotation.Target;
 
 /**
- * Tests that extend AbstractFunctionTestCase can use this annotation to specify the name of the function
+ * Tests that extend {@link AbstractScalarFunctionTestCase} can use this annotation to specify the name of the function
  * to use when generating documentation files while running tests.
  * If this is not used, the name will be deduced from the test class name, by removing the "Test" suffix, and converting
  * the class name to snake case. This annotation can be used to override that behavior, for cases where the deduced name
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
index 7eadad58ec09b..9095f5da63bf3 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
@@ -1301,6 +1301,14 @@ public List<Object> getDataValues() {
             return data.stream().filter(d -> d.forceLiteral == false).map(TypedData::data).collect(Collectors.toList());
         }
 
+        public List<List<Object>> getMultiRowDataValues() {
+            return data.stream().filter(TypedData::isMultiRow).map(TypedData::multiRowData).collect(Collectors.toList());
+        }
+
+        public boolean canGetDataAsLiterals() {
+            return data.stream().noneMatch(d -> d.isMultiRow() && d.multiRowData().size() != 1);
+        }
+
         public boolean canBuildEvaluator() {
             return canBuildEvaluator;
         }
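A sketch of what the two new accessors report, using values borrowed from the TopList cases added later in this patch (the `testCase` variable is assumed to be built from one multi-row field plus literal parameters):

```
// One multi-row input produces one inner list, holding one value per row.
assertThat(testCase.getMultiRowDataValues(), equalTo(List.of(List.of(5, 8, -2, 0, 200))));
// Literal conversion is only possible when every multi-row input carries
// exactly one row, mirroring the single-element rule asLiteral() enforces.
assertFalse(testCase.canGetDataAsLiterals());
```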
@@ -1363,14 +1371,18 @@ public Matcher<String> evaluatorToString() {
      * exists because we can't generate random values from the test parameter generation functions, and instead need to return
      * suppliers which generate the random values at test execution time.
      */
-    public record TypedDataSupplier(String name, Supplier<Object> supplier, DataType type, boolean forceLiteral) {
+    public record TypedDataSupplier(String name, Supplier<Object> supplier, DataType type, boolean forceLiteral, boolean multiRow) {
+
+        public TypedDataSupplier(String name, Supplier<Object> supplier, DataType type, boolean forceLiteral) {
+            this(name, supplier, type, forceLiteral, false);
+        }
 
         public TypedDataSupplier(String name, Supplier<Object> supplier, DataType type) {
-            this(name, supplier, type, false);
+            this(name, supplier, type, false, false);
         }
 
         public TypedData get() {
-            return new TypedData(supplier.get(), type, name, forceLiteral);
+            return new TypedData(supplier.get(), type, name, forceLiteral, multiRow);
         }
     }
 
@@ -1384,14 +1396,19 @@ public static class TypedData {
         private final DataType type;
         private final String name;
         private final boolean forceLiteral;
+        private final boolean multiRow;
 
         /**
          * @param data value to test against
          * @param type type of the value, for building expressions
          * @param name a name for the value, used for generating test case names
          * @param forceLiteral should this data always be converted to a literal and never to a field reference?
+         * @param multiRow if true, data is expected to be a List of values, one per row
          */
-        private TypedData(Object data, DataType type, String name, boolean forceLiteral) {
+        private TypedData(Object data, DataType type, String name, boolean forceLiteral, boolean multiRow) {
+            assert multiRow == false || data instanceof List : "multiRow data must be a List";
+            assert multiRow == false || forceLiteral == false : "multiRow data can't be converted to a literal";
+
             if (type == DataType.UNSIGNED_LONG && data instanceof BigInteger b) {
                 this.data = NumericUtils.asLongUnsigned(b);
             } else {
@@ -1400,6 +1417,7 @@ private TypedData(Object data, DataType type, String name, boolean forceLiteral)
             this.type = type;
             this.name = name;
             this.forceLiteral = forceLiteral;
+            this.multiRow = multiRow;
         }
 
         /**
@@ -1408,7 +1426,7 @@ private TypedData(Object data, DataType type, String name, boolean forceLiteral)
          * @param name a name for the value, used for generating test case names
          */
         public TypedData(Object data, DataType type, String name) {
-            this(data, type, name, false);
+            this(data, type, name, false, false);
         }
 
         /**
@@ -1420,13 +1438,23 @@ public TypedData(Object data, String name) {
             this(data, DataType.fromJava(data), name);
         }
 
+        /**
+         * Create a TypedData object for a field to be aggregated.
+         * @param data values to test against, one per row
+         * @param type type of the value, for building expressions
+         * @param name a name for the value, used for generating test case names
+         */
+        public static TypedData multiRow(List<?> data, DataType type, String name) {
+            return new TypedData(data, type, name, false, true);
+        }
+
         /**
          * Return a {@link TypedData} that always returns a {@link Literal} from
         * {@link #asField} and {@link #asDeepCopyOfField}. Use this for things that
         * must be constants.
          */
         public TypedData forceLiteral() {
-            return new TypedData(data, type, name, true);
+            return new TypedData(data, type, name, true, multiRow);
         }
 
         /**
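A sketch of declaring aggregation inputs with the new factory (values mirror the TopList cases below; constant parameters keep using `forceLiteral()`, which the new constructor assertions forbid for multi-row data):

```
List<TestCaseSupplier.TypedData> data = List.of(
    // One value per row for the field under aggregation.
    TestCaseSupplier.TypedData.multiRow(List.of(5, 8, -2, 0, 200), DataType.INTEGER, "field"),
    // Single-row constant parameter.
    new TestCaseSupplier.TypedData(3, DataType.INTEGER, "limit").forceLiteral()
);
```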
@@ -1437,11 +1465,19 @@ public boolean isForceLiteral() {
         }
 
         /**
-         * Return a {@link TypedData} that always returns {@code null} for it's
-         * value without modifying anything else in the supplier.
+         * If true, the data is expected to be a List of values, one per row.
          */
-        public TypedData forceValueToNull() {
-            return new TypedData(null, type, name, forceLiteral);
+        public boolean isMultiRow() {
+            return multiRow;
+        }
+
+        /**
+         * Return a {@link TypedData} with the new data.
+         *
+         * @param data The new data for the {@link TypedData}.
+         */
+        public TypedData withData(Object data) {
+            return new TypedData(data, type, name, forceLiteral, multiRow);
         }
 
         @Override
@@ -1476,6 +1512,15 @@ public Expression asDeepCopyOfField() {
          * Convert this into a {@link Literal}.
          */
         public Literal asLiteral() {
+            if (multiRow) {
+                var values = multiRowData();
+
+                if (values.size() != 1) {
+                    throw new IllegalStateException("Multirow values require exactly 1 element to be a literal, got " + values.size());
+                }
+
+                return new Literal(Source.synthetic(name), values, type);
+            }
             return new Literal(Source.synthetic(name), data, type);
         }
 
@@ -1486,6 +1531,14 @@ public Object data() {
             return data;
         }
 
+        /**
+         * Values to test against.
+         */
+        @SuppressWarnings("unchecked")
+        public List<Object> multiRowData() {
+            return (List<Object>) data;
+        }
+
         /**
          * @return the data value being supplied, casting unsigned longs into BigIntegers correctly
          */
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListTests.java
new file mode 100644
index 0000000000000..33770ff2467ef
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListTests.java
@@ -0,0 +1,249 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractAggregationTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class TopListTests extends AbstractAggregationTestCase { + public TopListTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + var suppliers = List.of( + // All types + new TestCaseSupplier(List.of(DataType.INTEGER, DataType.INTEGER, DataType.KEYWORD), () -> { + var limit = randomIntBetween(2, 4); + return new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5, 8, -2, 0, 200), DataType.INTEGER, "field"), + new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(List.of(200, 8, 5, 0).subList(0, limit)) + ); + }), + new TestCaseSupplier(List.of(DataType.LONG, DataType.INTEGER, DataType.KEYWORD), () -> { + var limit = randomIntBetween(2, 4); + return new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, -2L, 0L, 200L), DataType.LONG, "field"), + new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.LONG, + equalTo(List.of(200L, 8L, 5L, 0L).subList(0, limit)) + ); + }), + new TestCaseSupplier(List.of(DataType.DOUBLE, DataType.INTEGER, DataType.KEYWORD), () -> { + var limit = randomIntBetween(2, 4); + return new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5., 8., -2., 0., 200.), DataType.DOUBLE, "field"), + new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.DOUBLE, + equalTo(List.of(200., 8., 5., 0.).subList(0, limit)) + ); + }), + new TestCaseSupplier(List.of(DataType.DATETIME, DataType.INTEGER, DataType.KEYWORD), () -> { + var limit = randomIntBetween(2, 4); + return new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, -2L, 0L, 200L), DataType.DATETIME, "field"), + new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.DATETIME, + equalTo(List.of(200L, 8L, 5L, 0L).subList(0, 
limit)) + ); + }), + + // Surrogates + new TestCaseSupplier( + List.of(DataType.INTEGER, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5, 8, -2, 0, 200), DataType.INTEGER, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(200) + ) + ), + new TestCaseSupplier( + List.of(DataType.LONG, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, -2L, 0L, 200L), DataType.LONG, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.LONG, + equalTo(200L) + ) + ), + new TestCaseSupplier( + List.of(DataType.DOUBLE, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5., 8., -2., 0., 200.), DataType.DOUBLE, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.DOUBLE, + equalTo(200.) + ) + ), + new TestCaseSupplier( + List.of(DataType.DATETIME, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, 2L, 0L, 200L), DataType.DATETIME, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.DATETIME, + equalTo(200L) + ) + ), + + // Folding + new TestCaseSupplier( + List.of(DataType.INTEGER, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(200), DataType.INTEGER, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(200) + ) + ), + new TestCaseSupplier( + List.of(DataType.LONG, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.LONG, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.LONG, + equalTo(200L) + ) + ), + new TestCaseSupplier( + List.of(DataType.DOUBLE, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(200.), 
DataType.DOUBLE, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.DOUBLE, + equalTo(200.) + ) + ), + new TestCaseSupplier( + List.of(DataType.DATETIME, DataType.INTEGER, DataType.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.DATETIME, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + DataType.DATETIME, + equalTo(200L) + ) + ), + + // Resolution errors + new TestCaseSupplier( + List.of(DataType.LONG, DataType.INTEGER, DataType.KEYWORD), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, 2L, 0L, 200L), DataType.LONG, "field"), + new TestCaseSupplier.TypedData(0, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "Limit must be greater than 0 in [], found [0]" + ) + ), + new TestCaseSupplier( + List.of(DataType.LONG, DataType.INTEGER, DataType.KEYWORD), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, 2L, 0L, 200L), DataType.LONG, "field"), + new TestCaseSupplier.TypedData(2, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("wrong-order"), DataType.KEYWORD, "order").forceLiteral() + ), + "Invalid order value in [], expected [ASC, DESC] but got [wrong-order]" + ) + ), + new TestCaseSupplier( + List.of(DataType.LONG, DataType.INTEGER, DataType.KEYWORD), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, 2L, 0L, 200L), DataType.LONG, "field"), + new TestCaseSupplier.TypedData(null, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() + ), + "second argument of [] cannot be null, received [limit]" + ) + ), + new TestCaseSupplier( + List.of(DataType.LONG, DataType.INTEGER, DataType.KEYWORD), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + TestCaseSupplier.TypedData.multiRow(List.of(5L, 8L, 2L, 0L, 200L), DataType.LONG, "field"), + new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), + new TestCaseSupplier.TypedData(null, DataType.KEYWORD, "order").forceLiteral() + ), + "third argument of [] cannot be null, received [order]" + ) + ) + ); + + return parameterSuppliersFromTypedDataWithDefaultChecks(suppliers); + } + + @Override + protected Expression build(Source source, List args) { + return new TopList(source, args.get(0), args.get(1), args.get(2)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java index aaa6fe7d45c83..9100e71de76df 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.equalTo; -public class BucketTests extends AbstractFunctionTestCase { +public class BucketTests extends AbstractScalarFunctionTestCase { public BucketTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java index 074fe9e159023..760c57f6570bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.StringUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; @@ -22,7 +22,7 @@ import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; -public abstract class AbstractConfigurationFunctionTestCase extends AbstractFunctionTestCase { +public abstract class AbstractConfigurationFunctionTestCase extends AbstractScalarFunctionTestCase { protected abstract Expression buildWithConfiguration(Source source, List args, EsqlConfiguration configuration); @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 02da8ea22a6a0..0a03af206b846 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; 
-public class CaseTests extends AbstractFunctionTestCase { +public class CaseTests extends AbstractScalarFunctionTestCase { public CaseTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java index 9376849d8136c..7cc03be7d6273 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.equalTo; -public class GreatestTests extends AbstractFunctionTestCase { +public class GreatestTests extends AbstractScalarFunctionTestCase { public GreatestTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java index 0881b871c30f6..aa475f05ebe69 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class LeastTests extends AbstractFunctionTestCase { +public class LeastTests extends AbstractScalarFunctionTestCase { public LeastTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java index d97f070275617..e08da9850b555 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64Tests.java @@ -14,7 +14,7 @@ import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -27,7 +27,7 @@ import static org.hamcrest.Matchers.equalTo; @FunctionName("from_base64") -public class FromBase64Tests extends AbstractFunctionTestCase { +public class FromBase64Tests extends AbstractScalarFunctionTestCase { public FromBase64Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java index 4c9175e4906bf..88ca7d0452b3e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64Tests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -27,7 +27,7 @@ import static org.hamcrest.Matchers.equalTo; @FunctionName("to_base64") -public class ToBase64Tests extends AbstractFunctionTestCase { +public class ToBase64Tests extends AbstractScalarFunctionTestCase { public ToBase64Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java index c4e53d922ac60..c5b9b2501aeae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -24,7 +24,7 @@ import static java.util.Collections.emptyList; -public class ToBooleanTests extends AbstractFunctionTestCase { +public class ToBooleanTests extends AbstractScalarFunctionTestCase { public ToBooleanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java index 1c1431fe3b7ea..a59e7b0085e4c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; @FunctionName("to_cartesianpoint") -public class ToCartesianPointTests extends AbstractFunctionTestCase { +public class ToCartesianPointTests extends AbstractScalarFunctionTestCase { public ToCartesianPointTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java index 48a610804845d..973431d676b82 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; @FunctionName("to_cartesianshape") -public class ToCartesianShapeTests extends AbstractFunctionTestCase { +public class ToCartesianShapeTests extends AbstractScalarFunctionTestCase { public ToCartesianShapeTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java index 6aef91be43088..e512334391bed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -14,7 +14,7 @@ import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -26,7 +26,7 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; -public class ToDatetimeTests extends AbstractFunctionTestCase { +public class ToDatetimeTests extends AbstractScalarFunctionTestCase { public ToDatetimeTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java index fc45c8b26a869..bd07141009d3e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -22,7 +22,7 @@ import java.util.function.Function; import java.util.function.Supplier; -public class ToDegreesTests extends AbstractFunctionTestCase { +public class ToDegreesTests extends AbstractScalarFunctionTestCase { public ToDegreesTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java index 5f45cc11d9c5a..d4d20629da09e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -26,7 +26,7 @@ import java.util.function.Function; import java.util.function.Supplier; -public class ToDoubleTests extends AbstractFunctionTestCase { +public class ToDoubleTests extends AbstractScalarFunctionTestCase { public ToDoubleTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = 
testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java index 2b5dc453acc23..7a3b83f3ab113 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; @FunctionName("to_geopoint") -public class ToGeoPointTests extends AbstractFunctionTestCase { +public class ToGeoPointTests extends AbstractScalarFunctionTestCase { public ToGeoPointTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java index bca8dc822052f..831539852846c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; @FunctionName("to_geoshape") -public class ToGeoShapeTests extends AbstractFunctionTestCase { +public class ToGeoShapeTests extends AbstractScalarFunctionTestCase { public ToGeoShapeTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java index 20b48d24f8211..ffa94548f0a23 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -26,7 +26,7 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.esql.core.util.StringUtils.parseIP; -public class ToIPTests extends AbstractFunctionTestCase { +public class ToIPTests extends AbstractScalarFunctionTestCase { public ToIPTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java index 45837c2110ff3..7984c1e04effc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -26,7 +26,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToInt; -public class ToIntegerTests extends AbstractFunctionTestCase { +public class ToIntegerTests extends AbstractScalarFunctionTestCase { public ToIntegerTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java index 565562b8574d2..27c69ae977f6b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -25,7 +25,7 @@ import java.util.function.Function; import java.util.function.Supplier; -public class ToLongTests extends AbstractFunctionTestCase { +public class ToLongTests extends AbstractScalarFunctionTestCase { public ToLongTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java index 3f6e28c65142f..33e8eee7a8de4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -22,7 +22,7 @@ import java.util.function.Function; import java.util.function.Supplier; -public class ToRadiansTests extends AbstractFunctionTestCase { +public class ToRadiansTests extends AbstractScalarFunctionTestCase { public ToRadiansTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 0556742b55b3c..809b4ddaa78a4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; -public class ToStringTests extends AbstractFunctionTestCase { +public class ToStringTests extends AbstractScalarFunctionTestCase { public ToStringTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java index 44092db499d2d..a1fccac8edfd1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import 
org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigDecimal; @@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataTypeConverter.safeToUnsignedLong; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.UNSIGNED_LONG_MAX_AS_DOUBLE; -public class ToUnsignedLongTests extends AbstractFunctionTestCase { +public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { public ToUnsignedLongTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java index 34281442872a5..1c37afc1c0722 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.versionfield.Version; @@ -22,7 +22,7 @@ import java.util.List; import java.util.function.Supplier; -public class ToVersionTests extends AbstractFunctionTestCase { +public class ToVersionTests extends AbstractScalarFunctionTestCase { public ToVersionTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java index 89cfda5c4bce5..4af2ce1b7cb00 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.time.ZonedDateTime; @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class DateDiffTests extends AbstractFunctionTestCase { +public class DateDiffTests extends AbstractScalarFunctionTestCase { public DateDiffTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index 161b338cc85b2..f0aa766fb1bf9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; @@ -28,7 +28,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; -public class DateParseTests extends AbstractFunctionTestCase { +public class DateParseTests extends AbstractScalarFunctionTestCase { public DateParseTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java index 4c5a7d3734ce3..17d8cd6a57223 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.time.Duration; @@ -28,7 +28,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class DateTruncTests extends AbstractFunctionTestCase { +public class DateTruncTests extends AbstractScalarFunctionTestCase { public DateTruncTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java index 0d8f4bc7ea115..3cdc54f240a96 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; @FunctionName("cidr_match") -public class CIDRMatchTests extends AbstractFunctionTestCase { +public class CIDRMatchTests extends AbstractScalarFunctionTestCase { public CIDRMatchTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java index a575eb48c4bd7..298bcb3f49466 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class IpPrefixTests extends AbstractFunctionTestCase { +public class IpPrefixTests extends AbstractScalarFunctionTestCase { public IpPrefixTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java index 7bd195ab86389..b5923c7a5b214 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; -public class AbsTests extends AbstractFunctionTestCase { +public class AbsTests extends AbstractScalarFunctionTestCase { @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java index 02974c10480d2..7c5cd87dfee39 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java @@ -12,13 +12,13 
@@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class AcosTests extends AbstractFunctionTestCase { +public class AcosTests extends AbstractScalarFunctionTestCase { public AcosTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java index d4d13c2054fcd..38e210d81e5fd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class AsinTests extends AbstractFunctionTestCase { +public class AsinTests extends AbstractScalarFunctionTestCase { public AsinTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java index 3b81316da5676..1144919094812 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Tests.java @@ -12,13 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class Atan2Tests extends AbstractFunctionTestCase { +public class Atan2Tests extends AbstractScalarFunctionTestCase { public Atan2Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java index c92c626a5601b..c9f7a1baeadbe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanTests.java @@ -12,13 +12,13 @@ import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class AtanTests extends AbstractFunctionTestCase { +public class AtanTests extends AbstractScalarFunctionTestCase { public AtanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java index 14d6075f5cbe3..f644d8bc72dce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; -public class CbrtTests extends AbstractFunctionTestCase { +public class CbrtTests extends AbstractScalarFunctionTestCase { public CbrtTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java index ff61ecfa39687..1572b928a0d75 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; -public class CeilTests extends AbstractFunctionTestCase { +public class CeilTests extends AbstractScalarFunctionTestCase { public CeilTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java index 61e7a1f051905..dc5eec4f90d32 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class CosTests extends AbstractFunctionTestCase { +public class CosTests extends AbstractScalarFunctionTestCase { public CosTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java index 1ea63cc006e9c..79557b15be08a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class CoshTests extends AbstractFunctionTestCase { +public class CoshTests extends AbstractScalarFunctionTestCase { public CoshTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java index 8eb0b80fc21d7..763ad3a2b49c9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ETests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.equalTo; -public class ETests extends AbstractFunctionTestCase { +public class ETests extends AbstractScalarFunctionTestCase { public ETests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java 
index f0c990ec64af1..269dabcc6b6b8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -21,7 +21,7 @@ import java.util.List; import java.util.function.Supplier; -public class FloorTests extends AbstractFunctionTestCase { +public class FloorTests extends AbstractScalarFunctionTestCase { public FloorTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 64329d7824b74..ca0c8718f5ac0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; -public class Log10Tests extends AbstractFunctionTestCase { +public class Log10Tests extends AbstractScalarFunctionTestCase { public Log10Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java index ce53fdbfc1851..1c002e111e575 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java @@ -13,13 +13,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class LogTests extends AbstractFunctionTestCase { +public class LogTests extends 
AbstractScalarFunctionTestCase { public LogTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java index c21082b905962..8e427fcbae2b8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.equalTo; -public class PiTests extends AbstractFunctionTestCase { +public class PiTests extends AbstractScalarFunctionTestCase { public PiTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java index 545e7c14ff2b2..bea0f399233fd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java @@ -13,13 +13,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class PowTests extends AbstractFunctionTestCase { +public class PowTests extends AbstractScalarFunctionTestCase { public PowTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 5e19d5f606034..c05388a9708da 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -public class RoundTests extends AbstractFunctionTestCase { +public class RoundTests extends AbstractScalarFunctionTestCase { public RoundTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java index 89c2d07c4470a..21b44134458b7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SignumTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -22,7 +22,7 @@ import java.util.List; import java.util.function.Supplier; -public class SignumTests extends AbstractFunctionTestCase { +public class SignumTests extends AbstractScalarFunctionTestCase { public SignumTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java index 0d9bd6bcae64a..7a1190d86c2bf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class SinTests extends AbstractFunctionTestCase { +public class SinTests extends AbstractScalarFunctionTestCase { public SinTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java index 8f78e8ee67106..b83519c6d1299 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import 
org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class SinhTests extends AbstractFunctionTestCase { +public class SinhTests extends AbstractScalarFunctionTestCase { public SinhTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index a1d5b8523175c..9c81bbdc3cd49 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; -public class SqrtTests extends AbstractFunctionTestCase { +public class SqrtTests extends AbstractScalarFunctionTestCase { public SqrtTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java index 86c59a7a06cf4..369c33a1291f1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanTests.java @@ -12,13 +12,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class TanTests extends AbstractFunctionTestCase { +public class TanTests extends AbstractScalarFunctionTestCase { public TanTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java index 1f4fef4ab15c8..14fdcdca2fe96 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhTests.java @@ -12,13 +12,13 @@ import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.List; import java.util.function.Supplier; -public class TanhTests extends AbstractFunctionTestCase { +public class TanhTests extends AbstractScalarFunctionTestCase { public TanhTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java index aa64dfc6af90d..959db368ce348 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.equalTo; -public class TauTests extends AbstractFunctionTestCase { +public class TauTests extends AbstractScalarFunctionTestCase { public TauTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index 2ea79d8a165c6..212b66027d455 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.hamcrest.Matcher; @@ -39,7 +39,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; -public abstract class AbstractMultivalueFunctionTestCase extends AbstractFunctionTestCase { +public abstract class AbstractMultivalueFunctionTestCase extends AbstractScalarFunctionTestCase { /** * Build many test cases with {@code boolean} values. 
*/ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java index f95747618dd28..7039d9edf794b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -27,7 +27,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.equalTo; -public class MvAppendTests extends AbstractFunctionTestCase { +public class MvAppendTests extends AbstractScalarFunctionTestCase { public MvAppendTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java index ba4ddb1be84cc..0277093152cba 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.equalTo; -public class MvConcatTests extends AbstractFunctionTestCase { +public class MvConcatTests extends AbstractScalarFunctionTestCase { public MvConcatTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index 3f6fb841f006f..5684c68051446 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import 
org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -28,7 +28,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -public class MvSliceTests extends AbstractFunctionTestCase { +public class MvSliceTests extends AbstractScalarFunctionTestCase { public MvSliceTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index 7c6413e590bfe..a085c0acfa25d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class MvSortTests extends AbstractFunctionTestCase { +public class MvSortTests extends AbstractScalarFunctionTestCase { public MvSortTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java index 30fe420f29960..e9f0fd5b51516 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -25,7 +25,7 @@ import static java.lang.Math.max; import static org.hamcrest.Matchers.equalTo; -public class MvZipTests extends AbstractFunctionTestCase { +public class MvZipTests extends AbstractScalarFunctionTestCase { public MvZipTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java index 83f5a621c93a5..c779fa9e2789f 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.VaragsTestCaseBuilder; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunctionTestCase; @@ -39,7 +39,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; -public class CoalesceTests extends AbstractFunctionTestCase { +public class CoalesceTests extends AbstractScalarFunctionTestCase { public CoalesceTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java index 299b66433dcd0..b99b47b6f505a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.hamcrest.Matcher; @@ -27,7 +27,7 @@ import static org.hamcrest.Matchers.equalTo; -public class IsNotNullTests extends AbstractFunctionTestCase { +public class IsNotNullTests extends AbstractScalarFunctionTestCase { public IsNotNullTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java index 606e9598bda63..7abfad39967a5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.hamcrest.Matcher; @@ -27,7 +27,7 @@ import static org.hamcrest.Matchers.equalTo; -public class IsNullTests extends AbstractFunctionTestCase { +public class IsNullTests extends AbstractScalarFunctionTestCase { public IsNullTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java index 37e09caf0d105..a30cce9f765ed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -33,7 +33,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; import static org.hamcrest.Matchers.equalTo; -public abstract class BinarySpatialFunctionTestCase extends AbstractFunctionTestCase { +public abstract class BinarySpatialFunctionTestCase extends AbstractScalarFunctionTestCase { private static String getFunctionClassName() { Class testClass = getTestClass(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java index fa0fc8465ce7a..71e73398ddcd4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -25,7 +25,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; @FunctionName("st_x") -public class StXTests extends AbstractFunctionTestCase { +public class StXTests extends AbstractScalarFunctionTestCase { public StXTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java index 
15f34271be779..a30ae924754d6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -25,7 +25,7 @@ import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.UNSPECIFIED; @FunctionName("st_y") -public class StYTests extends AbstractFunctionTestCase { +public class StYTests extends AbstractScalarFunctionTestCase { public StYTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java index 27e3fc8684efc..a92f3ffb49533 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AbstractTrimTests.java @@ -9,7 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.equalTo; -public abstract class AbstractTrimTests extends AbstractFunctionTestCase { +public abstract class AbstractTrimTests extends AbstractScalarFunctionTestCase { static Iterable parameters(String name, boolean trimLeading, boolean trimTrailing) { List suppliers = new ArrayList<>(); for (DataType type : strings()) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java index f46ae25fddfc7..c398faacb90d0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class 
ConcatTests extends AbstractFunctionTestCase { +public class ConcatTests extends AbstractScalarFunctionTestCase { public ConcatTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java index 863243a352bb0..5ae69b03ae882 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.equalTo; -public class EndsWithTests extends AbstractFunctionTestCase { +public class EndsWithTests extends AbstractScalarFunctionTestCase { public EndsWithTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java index 7d6e3439c8063..88ee7881e128a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -28,7 +28,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; -public class LeftTests extends AbstractFunctionTestCase { +public class LeftTests extends AbstractScalarFunctionTestCase { public LeftTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java index 4a7e6b3a0996d..a1451b6bedf7a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class LengthTests extends AbstractFunctionTestCase { +public class LengthTests extends AbstractScalarFunctionTestCase { public LengthTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java index 011252a3f7e14..13d8edf489a66 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.nio.charset.StandardCharsets; @@ -30,7 +30,7 @@ /** * Tests for {@link Locate} function. */ -public class LocateTests extends AbstractFunctionTestCase { +public class LocateTests extends AbstractScalarFunctionTestCase { public LocateTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index e673be2ad5290..0074f83b3bbce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -30,7 +30,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; -public class RLikeTests extends AbstractFunctionTestCase { +public class RLikeTests extends AbstractScalarFunctionTestCase { public RLikeTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java 
index dc266066bd424..7c8426a5fe3fc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.junit.After; import java.util.ArrayList; @@ -36,7 +36,7 @@ /** * These tests create rows that are 1MB in size. Test classes - * which extend AbstractFunctionTestCase rerun test cases with + * which extend AbstractScalarFunctionTestCase rerun test cases with * many randomized inputs. Unfortunately, tests are run with * limited memory, and instantiating many copies of these * tests with large rows causes out of memory. @@ -63,7 +63,7 @@ public void testTooBig() { public String process(String str, int number) { try ( - var eval = AbstractFunctionTestCase.evaluator( + var eval = AbstractScalarFunctionTestCase.evaluator( new Repeat(Source.EMPTY, field("string", DataType.KEYWORD), field("number", DataType.INTEGER)) ).get(driverContext()); Block block = eval.eval(row(List.of(new BytesRef(str), number))); @@ -73,7 +73,7 @@ public String process(String str, int number) { } /** - * The following fields and methods were borrowed from AbstractFunctionTestCase + * The following fields and methods were borrowed from AbstractScalarFunctionTestCase */ private final List<CircuitBreaker> breakers = Collections.synchronizedList(new ArrayList<>()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java index cb89dc168b928..8d0368d1c618f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -public class RepeatTests extends AbstractFunctionTestCase { +public class RepeatTests extends AbstractScalarFunctionTestCase { public RepeatTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java index 82581b69f8713..fe77b9dcdb075 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -24,7 +24,7 @@ import static org.hamcrest.Matchers.equalTo; -public class ReplaceTests extends AbstractFunctionTestCase { +public class ReplaceTests extends AbstractScalarFunctionTestCase { public ReplaceTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java index 9d2b55e02fff7..cc98edb85f547 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -28,7 +28,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; -public class RightTests extends AbstractFunctionTestCase { +public class RightTests extends AbstractScalarFunctionTestCase { public RightTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index bf2dd0359a352..dd28b43bd66ed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -34,7 +34,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; -public class SplitTests extends AbstractFunctionTestCase { +public class SplitTests extends AbstractScalarFunctionTestCase { public SplitTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = 
testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java index f0c51a9b22e55..bd01f926d1571 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; -public class StartsWithTests extends AbstractFunctionTestCase { +public class StartsWithTests extends AbstractScalarFunctionTestCase { public StartsWithTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java index 0ee60cfc77d2f..1c49d3b408ad6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class SubstringTests extends AbstractFunctionTestCase { +public class SubstringTests extends AbstractScalarFunctionTestCase { public SubstringTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index 3aee4a92e9570..06736db28b2cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import 
org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.startsWith; @FunctionName("like") -public class WildcardLikeTests extends AbstractFunctionTestCase { +public class WildcardLikeTests extends AbstractScalarFunctionTestCase { public WildcardLikeTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java index 7e803ea2f84a0..a9663f9e37852 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/AbstractBinaryOperatorTestCase.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.tree.Location; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; -public abstract class AbstractBinaryOperatorTestCase extends AbstractFunctionTestCase { +public abstract class AbstractBinaryOperatorTestCase extends AbstractScalarFunctionTestCase { protected abstract Matcher<Object> resultsMatcher(List<TestCaseSupplier.TypedData> typedData); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java index b5bea7d858187..a5408cdb971c4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.junit.After; @@ -63,7 +64,7 @@ public BreakerTests(ByteSizeValue limit, Expression expression) { public void testBreaker() { DriverContext unlimited = driverContext(ByteSizeValue.ofGb(1)); DriverContext context = driverContext(limit); - EvalOperator.ExpressionEvaluator eval = AbstractFunctionTestCase.evaluator(expression).get(context); + EvalOperator.ExpressionEvaluator eval = AbstractScalarFunctionTestCase.evaluator(expression).get(context); try (Block b = unlimited.blockFactory().newConstantNullBlock(1)) { Exception e = expectThrows(CircuitBreakingException.class, () -> eval.eval(new Page(b))); assertThat(e.getMessage(), equalTo("over test limit")); diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 26a5d58b33900..c8a2511e34211 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; -public class AddTests extends AbstractFunctionTestCase { +public class AddTests extends AbstractScalarFunctionTestCase { public AddTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index a50d44822a4e3..7bc5b24651218 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.equalTo; -public class DivTests extends AbstractFunctionTestCase { +public class DivTests extends AbstractScalarFunctionTestCase { public DivTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index ce67f6453362b..133324bafd134 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.hamcrest.Matcher; @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.equalTo; -public class ModTests extends AbstractFunctionTestCase { +public class ModTests extends AbstractScalarFunctionTestCase { public ModTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java index 8b4dfa88415be..7472636611063 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; @@ -24,7 +24,7 @@ import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.AbstractArithmeticTestCase.arithmeticExceptionOverflowCase; import static org.hamcrest.Matchers.equalTo; -public class MulTests extends AbstractFunctionTestCase { +public class MulTests extends AbstractScalarFunctionTestCase { public MulTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index a628416ecc4b7..7eadd74eaeb9e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.time.Duration; @@ -28,7 +28,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; -public class NegTests extends AbstractFunctionTestCase { +public class NegTests extends AbstractScalarFunctionTestCase { public NegTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index e75ee9333ba54..9dc024ac1e8ff 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.time.Duration; @@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -public class SubTests extends AbstractFunctionTestCase { +public class SubTests extends AbstractScalarFunctionTestCase { public SubTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java index 3817bbe9cc74c..d3539f4a56fe9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EqualsTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -22,7 +22,7 @@ import java.util.List; import java.util.function.Supplier; -public class EqualsTests extends AbstractFunctionTestCase { +public class EqualsTests extends AbstractScalarFunctionTestCase { public EqualsTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -196,7 +196,10 @@ public static Iterable<Object[]> parameters() { ); return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + errorsForCasesWithoutExamples( + anyNullIsNull(true, suppliers), + AbstractScalarFunctionTestCase::errorMessageStringForBinaryOperators + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java index f25638b482817..b2174f7be1593 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanOrEqualTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import 
org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -23,7 +23,7 @@ import java.util.List; import java.util.function.Supplier; -public class GreaterThanOrEqualTests extends AbstractFunctionTestCase { +public class GreaterThanOrEqualTests extends AbstractScalarFunctionTestCase { public GreaterThanOrEqualTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -131,7 +131,10 @@ public static Iterable<Object[]> parameters() { ); return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + errorsForCasesWithoutExamples( + anyNullIsNull(true, suppliers), + AbstractScalarFunctionTestCase::errorMessageStringForBinaryOperators + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java index 0735e0dfd64f2..edb276e16dd99 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -23,7 +23,7 @@ import java.util.List; import java.util.function.Supplier; -public class GreaterThanTests extends AbstractFunctionTestCase { +public class GreaterThanTests extends AbstractScalarFunctionTestCase { public GreaterThanTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -131,7 +131,10 @@ public static Iterable<Object[]> parameters() { ); return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + errorsForCasesWithoutExamples( + anyNullIsNull(true, suppliers), + AbstractScalarFunctionTestCase::errorMessageStringForBinaryOperators + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java index 4a802dfcaf975..d89421f579b08 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanOrEqualTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import 
org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -23,7 +23,7 @@ import java.util.List; import java.util.function.Supplier; -public class LessThanOrEqualTests extends AbstractFunctionTestCase { +public class LessThanOrEqualTests extends AbstractScalarFunctionTestCase { public LessThanOrEqualTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -131,7 +131,10 @@ public static Iterable<Object[]> parameters() { ); return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + errorsForCasesWithoutExamples( + anyNullIsNull(true, suppliers), + AbstractScalarFunctionTestCase::errorMessageStringForBinaryOperators + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index 6f3f2441c6d00..9487d774ff221 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -23,7 +23,7 @@ import java.util.List; import java.util.function.Supplier; -public class LessThanTests extends AbstractFunctionTestCase { +public class LessThanTests extends AbstractScalarFunctionTestCase { public LessThanTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -131,7 +131,10 @@ public static Iterable<Object[]> parameters() { ); return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + errorsForCasesWithoutExamples( + anyNullIsNull(true, suppliers), + AbstractScalarFunctionTestCase::errorMessageStringForBinaryOperators + ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java index 174e2457eb0a5..e7d8c680ba5cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import 
org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.math.BigInteger; @@ -21,7 +21,7 @@ import java.util.List; import java.util.function.Supplier; -public class NotEqualsTests extends AbstractFunctionTestCase { +public class NotEqualsTests extends AbstractScalarFunctionTestCase { public NotEqualsTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -190,7 +190,10 @@ public static Iterable<Object[]> parameters() { ) ); return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), AbstractFunctionTestCase::errorMessageStringForBinaryOperators) + errorsForCasesWithoutExamples( + anyNullIsNull(true, suppliers), + AbstractScalarFunctionTestCase::errorMessageStringForBinaryOperators + ) ); } From 10ad8a642eaa33533d5e013a2c7f9be09990b40f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Thu, 27 Jun 2024 13:58:50 +0200 Subject: [PATCH 008/216] Make await security migrations more robust (#109854) * Make await security migrations more robust --- muted-tests.yml | 2 - .../org/elasticsearch/TransportVersions.java | 1 + .../elasticsearch/index/IndexVersions.java | 1 + .../support/SecurityMigrationTaskParams.java | 22 ++++++- .../test/SecuritySingleNodeTestCase.java | 13 +++- .../store/NativePrivilegeStoreCacheTests.java | 2 - .../xpack/security/Security.java | 61 +++++++++++-------- .../support/SecurityIndexManager.java | 35 ++++++++++- .../support/SecurityMigrationExecutor.java | 18 +++++- .../security/support/SecurityMigrations.java | 4 ++ .../authc/AuthenticationServiceTests.java | 1 + .../authc/esnative/NativeRealmTests.java | 1 + .../mapper/NativeRoleMappingStoreTests.java | 1 + .../authz/store/CompositeRolesStoreTests.java | 1 + .../store/NativePrivilegeStoreTests.java | 1 + .../CacheInvalidatorRegistryTests.java | 1 + .../SecurityMigrationExecutorTests.java | 25 +++----- 17 files changed, 136 insertions(+), 54 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 748f6f463f345..d5e603bbed2f0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -65,8 +65,6 @@ tests: - class: "org.elasticsearch.xpack.esql.action.AsyncEsqlQueryActionIT" issue: "https://github.com/elastic/elasticsearch/issues/109944" method: "testBasicAsyncExecution" - class: "org.elasticsearch.xpack.security.authz.store.NativePrivilegeStoreCacheTests" issue: "https://github.com/elastic/elasticsearch/issues/110015" - class: "org.elasticsearch.action.admin.indices.rollover.RolloverIT" issue: "https://github.com/elastic/elasticsearch/issues/110034" method: "testRolloverWithClosedWriteIndex" diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 767ba71cd262f..0a75ccfbbedf3 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -203,6 +203,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_EMBEDDINGS_ADDED = def(8_694_00_0); public static final TransportVersion EVENT_INGESTED_RANGE_IN_CLUSTER_STATE = def(8_695_00_0); public static final TransportVersion ESQL_ADD_AGGREGATE_TYPE = def(8_696_00_0); + public static final TransportVersion SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED = def(8_697_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 6dd87dedf24f7..f08b97cd7033e 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -109,6 +109,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion SEMANTIC_TEXT_FIELD_TYPE = def(8_507_00_0, Version.LUCENE_9_10_0); public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_00_0, Version.LUCENE_9_11_0); public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_00_0, Version.LUCENE_9_11_0); + public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_00_0, Version.LUCENE_9_11_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java index d54f3098fead9..14cc4d3d6f5b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/SecurityMigrationTaskParams.java @@ -21,33 +21,46 @@ import java.io.IOException; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public class SecurityMigrationTaskParams implements PersistentTaskParams { public static final String TASK_NAME = "security-migration"; private final int migrationVersion; + private final boolean migrationNeeded; + public static final ConstructingObjectParser<SecurityMigrationTaskParams, Void> PARSER = new ConstructingObjectParser<>( TASK_NAME, true, (arr) -> new SecurityMigrationTaskParams((int) arr[0], arr[1] == null || (boolean) arr[1]) ); static { PARSER.declareInt(constructorArg(), new ParseField("migration_version")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("migration_needed")); } - public SecurityMigrationTaskParams(int migrationVersion) { + public SecurityMigrationTaskParams(int migrationVersion, boolean migrationNeeded) { this.migrationVersion = migrationVersion; + this.migrationNeeded = migrationNeeded; } public SecurityMigrationTaskParams(StreamInput in) throws IOException { this.migrationVersion = in.readInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED)) { + this.migrationNeeded = in.readBoolean(); + } else { + this.migrationNeeded = true; + } } @Override public void writeTo(StreamOutput out) throws IOException { out.writeInt(migrationVersion); + if (out.getTransportVersion().onOrAfter(TransportVersions.SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED)) { + out.writeBoolean(migrationNeeded); + } } @Override @@ -64,6 +77,7 @@ public TransportVersion getMinimalSupportedVersion() { public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); builder.field("migration_version", migrationVersion); + builder.field("migration_needed", migrationNeeded); builder.endObject(); return builder; } @@ -75,4 +89,8 @@ public static SecurityMigrationTaskParams fromXContent(XContentParser parser) { public int getMigrationVersion() { return migrationVersion; }
+ + public boolean isMigrationNeeded() { + return migrationNeeded; + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java index 2eb45021a5bfe..07bdd83c9a144 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.MockSecureSettings; @@ -26,7 +27,9 @@ import org.elasticsearch.license.LicenseSettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices; import org.elasticsearch.xpack.security.LocalStateSecurity; +import org.elasticsearch.xpack.security.support.SecurityMigrations; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -41,10 +44,9 @@ import java.util.concurrent.CountDownLatch; import java.util.stream.Collectors; -import static org.elasticsearch.persistent.PersistentTasksCustomMetadata.getTaskWithId; import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; -import static org.elasticsearch.xpack.core.security.support.SecurityMigrationTaskParams.TASK_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.getMigrationVersionFromIndexMetadata; import static org.hamcrest.Matchers.hasItem; /** @@ -90,7 +92,12 @@ public void tearDown() throws Exception { } private boolean isMigrationComplete(ClusterState state) { - return getTaskWithId(state, TASK_NAME) == null; + IndexMetadata indexMetadata = state.metadata().index(TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7); + if (indexMetadata == null) { + // If index doesn't exist, no migration needed + return true; + } + return getMigrationVersionFromIndexMetadata(indexMetadata) == SecurityMigrations.MIGRATIONS_BY_VERSION.lastKey(); } private void awaitSecurityMigration() { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java index 3094a10b1572d..d11ca70744b7b 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java @@ -116,7 +116,6 @@ public void configureApplicationPrivileges() { assertEquals(6, putPrivilegesResponse.created().values().stream().mapToInt(List::size).sum()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109894") public void testGetPrivilegesUsesCache() { final Client client = client(); @@ -205,7 
+204,6 @@ public void testPopulationOfCacheWhenLoadingPrivilegesForAllApplications() { assertEquals(1, new GetPrivilegesRequestBuilder(client).application("app-1").privileges("write").get().privileges().length); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109895") public void testSuffixWildcard() { final Client client = client(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 404b9b85e2b24..bbb1feeef8d44 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -788,7 +788,8 @@ Collection<Object> createComponents( this.persistentTasksService.set(persistentTasksService); systemIndices.getMainIndexManager().addStateListener((oldState, newState) -> { - if (clusterService.state().nodes().isLocalNodeElectedMaster()) { + // Only consider applying migrations if it's the master node and the security index exists + if (clusterService.state().nodes().isLocalNodeElectedMaster() && newState.indexExists()) { applyPendingSecurityMigrations(newState); } }); @@ -1203,43 +1204,53 @@ Collection<Object> createComponents( } private void applyPendingSecurityMigrations(SecurityIndexManager.State newState) { + // If no migrations have been applied and the security index is on the latest version (new index), all migrations can be skipped + if (newState.migrationsVersion == 0 && newState.createdOnLatestVersion) { + submitPersistentMigrationTask(SecurityMigrations.MIGRATIONS_BY_VERSION.lastKey(), false); + return; + } + Map.Entry<Integer, SecurityMigrations.SecurityMigration> nextMigration = SecurityMigrations.MIGRATIONS_BY_VERSION.higherEntry( newState.migrationsVersion ); - if (nextMigration == null) { - return; - } - // Check if next migration that has not been applied is eligible to run on the current cluster - if (systemIndices.getMainIndexManager().isEligibleSecurityMigration(nextMigration.getValue()) == false) { + if (nextMigration == null || systemIndices.getMainIndexManager().isEligibleSecurityMigration(nextMigration.getValue()) == false) { // Reset retry counter if all eligible migrations have been applied successfully nodeLocalMigrationRetryCount.set(0); } else if (nodeLocalMigrationRetryCount.get() > MAX_SECURITY_MIGRATION_RETRY_COUNT) { logger.warn("Security migration failed [" + nodeLocalMigrationRetryCount.get() + "] times, restart node to retry again."); } else if (systemIndices.getMainIndexManager().isReadyForSecurityMigration(nextMigration.getValue())) { - nodeLocalMigrationRetryCount.incrementAndGet(); - persistentTasksService.get() - .sendStartRequest( - SecurityMigrationTaskParams.TASK_NAME, - SecurityMigrationTaskParams.TASK_NAME, - new SecurityMigrationTaskParams(newState.migrationsVersion), - null, - ActionListener.wrap((response) -> { - logger.debug("Security migration task submitted"); - }, (exception) -> { - // Do nothing if the task is already in progress - if (ExceptionsHelper.unwrapCause(exception) instanceof ResourceAlreadyExistsException) { - // Do not count ResourceAlreadyExistsException as failure - nodeLocalMigrationRetryCount.decrementAndGet(); - } else { - logger.warn("Submit security migration task failed: " + exception.getCause()); - } - }) - ); + submitPersistentMigrationTask(newState.migrationsVersion); } } + private void submitPersistentMigrationTask(int migrationsVersion) { + submitPersistentMigrationTask(migrationsVersion, true); + } + 
+ private void submitPersistentMigrationTask(int migrationsVersion, boolean securityMigrationNeeded) { + nodeLocalMigrationRetryCount.incrementAndGet(); + persistentTasksService.get() + .sendStartRequest( + SecurityMigrationTaskParams.TASK_NAME, + SecurityMigrationTaskParams.TASK_NAME, + new SecurityMigrationTaskParams(migrationsVersion, securityMigrationNeeded), + null, + ActionListener.wrap((response) -> { + logger.debug("Security migration task submitted"); + }, (exception) -> { + // Do nothing if the task is already in progress + if (ExceptionsHelper.unwrapCause(exception) instanceof ResourceAlreadyExistsException) { + // Do not count ResourceAlreadyExistsException as failure + nodeLocalMigrationRetryCount.decrementAndGet(); + } else { + logger.warn("Submit security migration task failed: " + exception.getCause()); + } + }) + ); + } + private AuthorizationEngine getAuthorizationEngine() { return findValueFromExtensions("authorization engine", extension -> extension.getAuthorizationEngine(settings)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 1ac22bfd21883..1796b43b0726f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -38,6 +38,7 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.rest.RestStatus; @@ -55,6 +56,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_FORMAT_SETTING; +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_VERSION_CREATED; import static org.elasticsearch.indices.SystemIndexDescriptor.VERSION_META_KEY; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_DATA_KEY; @@ -244,6 +246,19 @@ private SystemIndexDescriptor.MappingsVersion getMinSecurityIndexMappingVersion( return mappingsVersion == null ? new SystemIndexDescriptor.MappingsVersion(1, 0) : mappingsVersion; } + /** + * Check if the index was created on the latest index version available in the cluster + */ + private static boolean isCreatedOnLatestVersion(IndexMetadata indexMetadata, ClusterState clusterState) { + final IndexVersion indexVersionCreated = indexMetadata != null + ? 
SETTING_INDEX_VERSION_CREATED.get(indexMetadata.getSettings()) : null; return indexVersionCreated != null && indexVersionCreated.onOrAfter( IndexVersion.min(IndexVersion.current(), clusterState.nodes().getMaxDataNodeCompatibleIndexVersion()) ); } + @Override public void clusterChanged(ClusterChangedEvent event) { if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { @@ -254,7 +269,7 @@ public void clusterChanged(ClusterChangedEvent event) { } final State previousState = state; final IndexMetadata indexMetadata = resolveConcreteIndex(systemIndexDescriptor.getAliasName(), event.state().metadata()); - final Map<String, String> customMetadata = indexMetadata == null ? null : indexMetadata.getCustomData(MIGRATION_VERSION_CUSTOM_KEY); + final boolean createdOnLatestVersion = isCreatedOnLatestVersion(indexMetadata, event.state()); final Instant creationTime = indexMetadata != null ? Instant.ofEpochMilli(indexMetadata.getCreationDate()) : null; final boolean isIndexUpToDate = indexMetadata == null || INDEX_FORMAT_SETTING.get(indexMetadata.getSettings()) == systemIndexDescriptor.getIndexFormat(); final boolean indexAvailableForWrite = available.v1(); final boolean indexAvailableForSearch = available.v2(); final boolean mappingIsUpToDate = indexMetadata == null || checkIndexMappingUpToDate(event.state()); - final int migrationsVersion = customMetadata == null ? 0 : Integer.parseInt(customMetadata.get(MIGRATION_VERSION_CUSTOM_DATA_KEY)); + final int migrationsVersion = getMigrationVersionFromIndexMetadata(indexMetadata); final SystemIndexDescriptor.MappingsVersion minClusterMappingVersion = getMinSecurityIndexMappingVersion(event.state()); final int indexMappingVersion = loadIndexMappingVersion(systemIndexDescriptor.getAliasName(), event.state()); final String concreteIndexName = indexMetadata == null @@ -290,6 +305,7 @@ public void clusterChanged(ClusterChangedEvent event) { indexAvailableForSearch, indexAvailableForWrite, mappingIsUpToDate, + createdOnLatestVersion, migrationsVersion, minClusterMappingVersion, indexMappingVersion, @@ -310,6 +326,15 @@ public void clusterChanged(ClusterChangedEvent event) { } } + public static int getMigrationVersionFromIndexMetadata(IndexMetadata indexMetadata) { + Map<String, String> customMetadata = indexMetadata == null ? null : indexMetadata.getCustomData(MIGRATION_VERSION_CUSTOM_KEY); + if (customMetadata == null) { + return 0; + } + String migrationVersion = customMetadata.get(MIGRATION_VERSION_CUSTOM_DATA_KEY); + return migrationVersion == null ? 
0 : Integer.parseInt(migrationVersion); + } + public void onStateRecovered(Consumer<State> recoveredStateConsumer) { BiConsumer<State, State> stateChangeListener = (previousState, nextState) -> { boolean stateJustRecovered = previousState == UNRECOVERED_STATE && nextState != UNRECOVERED_STATE; @@ -588,6 +613,7 @@ public static class State { false, false, false, + false, null, null, null, @@ -602,6 +628,7 @@ public static class State { public final boolean indexAvailableForSearch; public final boolean indexAvailableForWrite; public final boolean mappingUpToDate; + public final boolean createdOnLatestVersion; public final Integer migrationsVersion; // Min mapping version supported by the descriptors in the cluster public final SystemIndexDescriptor.MappingsVersion minClusterMappingVersion; @@ -619,6 +646,7 @@ public State( boolean indexAvailableForSearch, boolean indexAvailableForWrite, boolean mappingUpToDate, + boolean createdOnLatestVersion, Integer migrationsVersion, SystemIndexDescriptor.MappingsVersion minClusterMappingVersion, Integer indexMappingVersion, @@ -634,6 +662,7 @@ public State( this.indexAvailableForWrite = indexAvailableForWrite; this.mappingUpToDate = mappingUpToDate; this.migrationsVersion = migrationsVersion; + this.createdOnLatestVersion = createdOnLatestVersion; this.minClusterMappingVersion = minClusterMappingVersion; this.indexMappingVersion = indexMappingVersion; this.concreteIndexName = concreteIndexName; @@ -653,6 +682,7 @@ public boolean equals(Object o) { && indexAvailableForSearch == state.indexAvailableForSearch && indexAvailableForWrite == state.indexAvailableForWrite && mappingUpToDate == state.mappingUpToDate + && createdOnLatestVersion == state.createdOnLatestVersion && Objects.equals(indexMappingVersion, state.indexMappingVersion) && Objects.equals(migrationsVersion, state.migrationsVersion) && Objects.equals(minClusterMappingVersion, state.minClusterMappingVersion) @@ -674,6 +704,7 @@ public int hashCode() { indexAvailableForSearch, indexAvailableForWrite, mappingUpToDate, + createdOnLatestVersion, migrationsVersion, minClusterMappingVersion, indexMappingVersion, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java index bd5d0fb5a8ef5..0f895a2db17e0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutor.java @@ -46,10 +46,24 @@ public SecurityMigrationExecutor( @Override protected void nodeOperation(AllocatedPersistentTask task, SecurityMigrationTaskParams params, PersistentTaskState state) { - applyOutstandingMigrations(task, params.getMigrationVersion(), ActionListener.wrap((res) -> task.markAsCompleted(), (exception) -> { + ActionListener<Void> listener = ActionListener.wrap((res) -> task.markAsCompleted(), (exception) -> { logger.warn("Security migration failed: " + exception); task.markAsFailed(exception); - })); + }); + + if (params.isMigrationNeeded() == false) { + updateMigrationVersion( + params.getMigrationVersion(), + securityIndexManager.getConcreteIndexName(), + ActionListener.wrap(response -> { + logger.info("Security migration not needed. 
Setting current version to: [" + params.getMigrationVersion() + "]"); + listener.onResponse(response); + }, listener::onFailure) + ); + return; + } + + applyOutstandingMigrations(task, params.getMigrationVersion(), listener); } private void applyOutstandingMigrations(AllocatedPersistentTask task, int currentMigrationVersion, ActionListener<Void> listener) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index 8ef132ad0ed34..f7ca72cd89eba 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -28,6 +28,10 @@ import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SecurityMainIndexMappingVersion.ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS; +/** + * Interface for creating SecurityMigrations that will be automatically applied once to existing .security indices + * IMPORTANT: A new index version needs to be added to {@link org.elasticsearch.index.IndexVersions} for the migration to be triggered + */ public class SecurityMigrations { public interface SecurityMigration { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 330eecc1563e2..62b72b4f9750c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -2512,6 +2512,7 @@ private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { true, true, true, + true, null, null, null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java index 37a4cd4f783e4..2254c78a2910c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java @@ -39,6 +39,7 @@ private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { true, true, true, + true, null, null, null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index c860ceeafc0f4..2a084bacfaf76 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -411,6 +411,7 @@ private SecurityIndexManager.State indexState(boolean isUpToDate, ClusterHealthS true, true, true, + true, null, null, null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 5b28c3dc39cfe..693bd9b868ede 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -1617,6 +1617,7 @@ public SecurityIndexManager.State dummyIndexState(boolean isIndexUpToDate, Clust true, true, true, + true, null, null, null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 6a2ac7721c9a1..d3b75210a5cbe 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -792,6 +792,7 @@ private SecurityIndexManager.State dummyState( true, true, true, + true, null, null, null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java index 698809beb6d30..e3b00dfbcc6b8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/CacheInvalidatorRegistryTests.java @@ -61,6 +61,7 @@ public void testSecurityIndexStateChangeWillInvalidateAllRegisteredInvalidators( true, true, true, + true, null, new SystemIndexDescriptor.MappingsVersion(SecurityMainIndexMappingVersion.latest().id(), 0), null, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java index 3c3b322c28a2f..0f63e5302a5f1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityMigrationExecutorTests.java @@ -43,6 +43,8 @@ public class SecurityMigrationExecutorTests extends ESTestCase { private boolean clientShouldThrowException = false; + private AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); + @Before public void setUpMocks() { threadPool = mock(ThreadPool.class); @@ -78,8 +80,8 @@ public void testSuccessfulMigration() { client, new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) ); - AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); - securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + + securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class)); verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); assertEquals(2, updateIndexMigrationVersionActionInvocations); @@ -105,8 +107,7 @@ public void testNoMigrationMeetsRequirements() { ) ); - AllocatedPersistentTask mockTask = 
mock(AllocatedPersistentTask.class); - securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class)); verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); assertEquals(0, updateIndexMigrationVersionActionInvocations); @@ -136,8 +137,7 @@ public void testPartialMigration() { ) ); - AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); - securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class)); verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); assertEquals(2, updateIndexMigrationVersionActionInvocations); @@ -154,11 +154,7 @@ public void testNoMigrationNeeded() { new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) ); - AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); - SecurityMigrationTaskParams taskParams = mock(SecurityMigrationTaskParams.class); - when(taskParams.getMigrationVersion()).thenReturn(7); - - securityMigrationExecutor.nodeOperation(mockTask, taskParams, mock(PersistentTaskState.class)); + securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(7, true), mock(PersistentTaskState.class)); verify(mockTask, times(1)).markAsCompleted(); verify(mockTask, times(0)).markAsFailed(any()); assertEquals(0, updateIndexMigrationVersionActionInvocations); @@ -190,13 +186,11 @@ public int minMappingVersion() { })) ); - AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); - assertThrows( IllegalStateException.class, () -> securityMigrationExecutor.nodeOperation( mockTask, - mock(SecurityMigrationTaskParams.class), + new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class) ) ); @@ -212,8 +206,7 @@ public void testUpdateMigrationVersionThrowsException() { new TreeMap<>(Map.of(1, generateMigration(migrateInvocations, true), 2, generateMigration(migrateInvocations, true))) ); clientShouldThrowException = true; - AllocatedPersistentTask mockTask = mock(AllocatedPersistentTask.class); - securityMigrationExecutor.nodeOperation(mockTask, mock(SecurityMigrationTaskParams.class), mock(PersistentTaskState.class)); + securityMigrationExecutor.nodeOperation(mockTask, new SecurityMigrationTaskParams(0, true), mock(PersistentTaskState.class)); verify(mockTask, times(1)).markAsFailed(any()); verify(mockTask, times(0)).markAsCompleted(); } From 41141fc31e4e243fba65640b44811d2cc4932c93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 27 Jun 2024 14:12:29 +0200 Subject: [PATCH 009/216] AwaitsFix: https://github.com/elastic/elasticsearch/issues/110225 --- muted-tests.yml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d5e603bbed2f0..ebf59855d93e1 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -10,7 +10,8 @@ tests: method: "testGuessIsDayFirstFromLocale" - class: "org.elasticsearch.test.rest.ClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/108857" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / 
dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ + \ dependent mappings / dates}" - class: "org.elasticsearch.upgrades.SearchStatesIT" issue: "https://github.com/elastic/elasticsearch/issues/108991" method: "testCanMatch" @@ -19,7 +20,8 @@ tests: method: "testTrainedModelInference" - class: "org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109188" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ + \ dependent mappings / dates}" - class: "org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT" issue: "https://github.com/elastic/elasticsearch/issues/109189" method: "test {p0=esql/70_locale/Date format with Italian locale}" @@ -34,7 +36,8 @@ tests: method: "testTimestampFieldTypeExposedByAllIndicesServices" - class: "org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109318" - method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling (too complex pattern)}" + method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling\ + \ (too complex pattern)}" - class: "org.elasticsearch.xpack.ml.integration.ClassificationHousePricingIT" issue: "https://github.com/elastic/elasticsearch/issues/101598" method: "testFeatureImportanceValues" @@ -80,6 +83,8 @@ tests: - class: org.elasticsearch.synonyms.SynonymsManagementAPIServiceIT method: testUpdateRuleWithMaxSynonyms issue: https://github.com/elastic/elasticsearch/issues/110212 +- class: "org.elasticsearch.rest.RestControllerIT" + issue: "https://github.com/elastic/elasticsearch/issues/110225" # Examples: # From ca2ea690a60d48af83ee56e61e594e4ddd354d21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Fern=C3=A1ndez=20Casta=C3=B1o?= Date: Thu, 27 Jun 2024 14:26:17 +0200 Subject: [PATCH 010/216] Update checkpoints after post-replication actions, even on failure (#109908) A failed post write refresh should not prevent advancing the local checkpoint if the translog operations have been fsynced correctly, hence we should update the checkpoints in all situations. On the other hand, if the fsync failed the local checkpoint won't advance anyway and the engine will fail during the next indexing operation. 
Closes #108190
---
 docs/changelog/109908.yaml                    |   5 +
 .../bulk/BulkAfterWriteFsyncFailureIT.java    | 119 ++++++++++++++++++
 .../replication/ReplicationOperation.java     |   5 +-
 3 files changed, 128 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog/109908.yaml
 create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkAfterWriteFsyncFailureIT.java

diff --git a/docs/changelog/109908.yaml b/docs/changelog/109908.yaml
new file mode 100644
index 0000000000000..cdf2acf17096c
--- /dev/null
+++ b/docs/changelog/109908.yaml
@@ -0,0 +1,5 @@
+pr: 109908
+summary: "Update checkpoints after post-replication actions, even on failure"
+area: CRUD
+type: bug
+issues: []
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkAfterWriteFsyncFailureIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkAfterWriteFsyncFailureIT.java
new file mode 100644
index 0000000000000..5adc0b090ed37
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkAfterWriteFsyncFailureIT.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.action.bulk;
+
+import org.apache.lucene.tests.mockfile.FilterFileChannel;
+import org.apache.lucene.tests.mockfile.FilterFileSystemProvider;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.PathUtils;
+import org.elasticsearch.core.PathUtilsForTesting;
+import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.nio.file.FileSystem;
+import java.nio.file.OpenOption;
+import java.nio.file.Path;
+import java.nio.file.attribute.FileAttribute;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+
+public class BulkAfterWriteFsyncFailureIT extends ESSingleNodeTestCase {
+    private static FSyncFailureFileSystemProvider fsyncFailureFileSystemProvider;
+
+    @BeforeClass
+    public static void installDisruptFSyncFS() {
+        FileSystem current = PathUtils.getDefaultFileSystem();
+        fsyncFailureFileSystemProvider = new FSyncFailureFileSystemProvider(current);
+        PathUtilsForTesting.installMock(fsyncFailureFileSystemProvider.getFileSystem(null));
+    }
+
+    @AfterClass
+    public static void removeDisruptFSyncFS() {
+        PathUtilsForTesting.teardown();
+    }
+
+    public void testFsyncFailureDoesNotAdvanceLocalCheckpoints() {
+        String indexName = randomIdentifier();
+        client().admin()
+            .indices()
+            .prepareCreate(indexName)
+            .setSettings(
+                Settings.builder()
+                    .put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)
+                    .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+                    .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
+                    .build()
+            )
+            .setMapping("key", "type=keyword", "val", "type=long")
+            .get();
+        ensureGreen(indexName);
+
+        var localCheckpointBeforeBulk = getLocalCheckpointForShard(indexName, 0);
+        fsyncFailureFileSystemProvider.failFSyncOnce(true);
+        var bulkResponse = client().prepareBulk().add(prepareIndex(indexName).setId("1").setSource("key", "foo", "val", 10)).get();
+        assertTrue(bulkResponse.hasFailures());
+        var localCheckpointAfterFailedBulk = getLocalCheckpointForShard(indexName, 0);
+        // fsync for the translog failed, hence the checkpoint doesn't advance
+        assertThat(localCheckpointBeforeBulk, equalTo(localCheckpointAfterFailedBulk));
+
+        // Since background refreshes are disabled, the shard is considered green until the next operation is appended into the translog
+        ensureGreen(indexName);
+
+        // If the after-write fsync fails, it'll fail the TranslogWriter but not the Engine; we'll need to try to append a new operation
+        // into the translog so the exception bubbles up and fails the engine. On the other hand, the TranslogReplicationAction will retry
+        // this action on AlreadyClosedExceptions, which is why the operation ends up succeeding even after the engine failed.
+        var bulkResponse2 = client().prepareBulk().add(prepareIndex(indexName).setId("2").setSource("key", "bar", "val", 20)).get();
+        assertFalse(bulkResponse2.hasFailures());
+
+        var localCheckpointAfterSuccessfulBulk = getLocalCheckpointForShard(indexName, 0);
+        assertThat(localCheckpointAfterSuccessfulBulk, is(greaterThan(localCheckpointAfterFailedBulk)));
+    }
+
+    long getLocalCheckpointForShard(String index, int shardId) {
+        var indicesService = getInstanceFromNode(IndicesService.class);
+        var indexShard = indicesService.indexServiceSafe(resolveIndex(index)).getShard(shardId);
+        return indexShard.getLocalCheckpoint();
+    }
+
+    public static class FSyncFailureFileSystemProvider extends FilterFileSystemProvider {
+        private final AtomicBoolean failFSyncs = new AtomicBoolean();
+
+        public FSyncFailureFileSystemProvider(FileSystem delegate) {
+            super("fsyncfailure://", delegate);
+        }
+
+        public void failFSyncOnce(boolean shouldFail) {
+            failFSyncs.set(shouldFail);
+        }
+
+        @Override
+        public FileChannel newFileChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
+            return new FilterFileChannel(super.newFileChannel(path, options, attrs)) {
+
+                @Override
+                public void force(boolean metaData) throws IOException {
+                    if (failFSyncs.compareAndSet(true, false)) {
+                        throw new IOException("simulated");
+                    }
+                    super.force(metaData);
+                }
+            };
+        }
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
index 04ba462523f5f..b38a067e8b316 100644
--- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
+++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java
@@ -189,7 +189,10 @@ public void onFailure(Exception e) {
                 logger.trace("[{}] op [{}] post replication actions failed for [{}]", primary.routingEntry().shardId(), opType, request);
                 // TODO: fail shard? This will otherwise have the local / global checkpoint info lagging, or possibly have replicas
                 // go out of sync with the primary
-                finishAsFailed(e);
+                // We update the checkpoints since a refresh might fail but the operations could be safely persisted; in the case that the
+                // fsync failed, the local checkpoint won't advance and the engine will be marked as failed when the next indexing operation
+                // is appended into the translog.
+ updateCheckPoints(primary.routingEntry(), primary::localCheckpoint, primary::globalCheckpoint, () -> finishAsFailed(e)); } }); } From 280fd2c68e811178893b7c5765fdfe92efad15c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Thu, 27 Jun 2024 14:48:52 +0200 Subject: [PATCH 011/216] AwaitsFix: https://github.com/elastic/elasticsearch/issues/110227 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ebf59855d93e1..6b28a297b546c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -85,6 +85,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/110212 - class: "org.elasticsearch.rest.RestControllerIT" issue: "https://github.com/elastic/elasticsearch/issues/110225" +- class: "org.elasticsearch.xpack.security.authz.store.NativePrivilegeStoreCacheTests" + issue: "https://github.com/elastic/elasticsearch/issues/110227" + method: "testGetPrivilegesUsesCache" # Examples: # From 446d04e75cd52d7a5c974e695ebfc2e2ee3e7611 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 27 Jun 2024 15:01:30 +0200 Subject: [PATCH 012/216] [Inference API] Make error messages consistent in InferenceAction (#110220) --- .../xpack/core/inference/action/InferenceAction.java | 7 +++++-- .../core/inference/action/InferenceActionRequestTests.java | 4 ++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index 16d0b940d40e6..229285510249c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -167,14 +167,16 @@ public TimeValue getInferenceTimeout() { public ActionRequestValidationException validate() { if (input == null) { var e = new ActionRequestValidationException(); - e.addValidationError("missing input"); + e.addValidationError("Field [input] cannot be null"); return e; } + if (input.isEmpty()) { var e = new ActionRequestValidationException(); - e.addValidationError("input array is empty"); + e.addValidationError("Field [input] cannot be an empty array"); return e; } + if (taskType.equals(TaskType.RERANK)) { if (query == null) { var e = new ActionRequestValidationException(); @@ -187,6 +189,7 @@ public ActionRequestValidationException validate() { return e; } } + return null; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index fa7044ffd8c8b..d4d4146c6a5ba 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -112,7 +112,7 @@ public void testValidation_TextEmbedding_Null() { ); ActionRequestValidationException inputNullError = inputNullRequest.validate(); assertNotNull(inputNullError); - assertThat(inputNullError.getMessage(), is("Validation Failed: 1: missing input;")); + assertThat(inputNullError.getMessage(), is("Validation Failed: 1: Field [input] cannot be null;")); } public void testValidation_TextEmbedding_Empty() { @@ 
-127,7 +127,7 @@ public void testValidation_TextEmbedding_Empty() { ); ActionRequestValidationException inputEmptyError = inputEmptyRequest.validate(); assertNotNull(inputEmptyError); - assertThat(inputEmptyError.getMessage(), is("Validation Failed: 1: input array is empty;")); + assertThat(inputEmptyError.getMessage(), is("Validation Failed: 1: Field [input] cannot be an empty array;")); } public void testValidation_Rerank_Null() { From fa23fbb4146834ae23b740d639d9d75ecd28b539 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Thu, 27 Jun 2024 09:08:37 -0400 Subject: [PATCH 013/216] Fixing bug with allocations (#110204) Co-authored-by: Elastic Machine --- ...RestStartTrainedModelDeploymentAction.java | 85 +----- ...tartTrainedModelDeploymentActionTests.java | 277 ++++++++++++++---- 2 files changed, 236 insertions(+), 126 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentAction.java index 40cf7d531d5ee..1a9fc6ce99823 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentAction.java @@ -7,15 +7,11 @@ package org.elasticsearch.xpack.ml.rest.inference; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; @@ -27,7 +23,6 @@ import java.io.IOException; import java.util.Collections; import java.util.List; -import java.util.Objects; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction.Request.CACHE_SIZE; @@ -87,22 +82,11 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient } if (restRequest.hasParam(TIMEOUT.getPreferredName())) { - TimeValue openTimeout = validateParameters( - request.getTimeout(), - restRequest.paramAsTime(TIMEOUT.getPreferredName(), StartTrainedModelDeploymentAction.DEFAULT_TIMEOUT), - StartTrainedModelDeploymentAction.DEFAULT_TIMEOUT - ); // hasParam, so never default - request.setTimeout(openTimeout); + request.setTimeout(restRequest.paramAsTime(TIMEOUT.getPreferredName(), request.getTimeout())); } request.setWaitForState( - validateParameters( - request.getWaitForState(), - AllocationStatus.State.fromString( - restRequest.param(WAIT_FOR.getPreferredName(), StartTrainedModelDeploymentAction.DEFAULT_WAITFOR_STATE.toString()) - ), - StartTrainedModelDeploymentAction.DEFAULT_WAITFOR_STATE - ) + AllocationStatus.State.fromString(restRequest.param(WAIT_FOR.getPreferredName(), request.getWaitForState().toString())) ); RestCompatibilityChecker.checkAndSetDeprecatedParam( @@ -110,84 +94,33 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient NUMBER_OF_ALLOCATIONS.getPreferredName(), 
RestApiVersion.V_8, restRequest, - (r, s) -> validateParameters( - request.getNumberOfAllocations(), - r.paramAsInt(s, StartTrainedModelDeploymentAction.DEFAULT_NUM_ALLOCATIONS), - StartTrainedModelDeploymentAction.DEFAULT_NUM_ALLOCATIONS - ), + (r, s) -> r.paramAsInt(s, request.getNumberOfAllocations()), request::setNumberOfAllocations ); + RestCompatibilityChecker.checkAndSetDeprecatedParam( THREADS_PER_ALLOCATION.getDeprecatedNames()[0], THREADS_PER_ALLOCATION.getPreferredName(), RestApiVersion.V_8, restRequest, - (r, s) -> validateParameters( - request.getThreadsPerAllocation(), - r.paramAsInt(s, StartTrainedModelDeploymentAction.DEFAULT_NUM_THREADS), - StartTrainedModelDeploymentAction.DEFAULT_NUM_THREADS - ), + (r, s) -> r.paramAsInt(s, request.getThreadsPerAllocation()), request::setThreadsPerAllocation ); - request.setQueueCapacity( - validateParameters( - request.getQueueCapacity(), - restRequest.paramAsInt(QUEUE_CAPACITY.getPreferredName(), StartTrainedModelDeploymentAction.DEFAULT_QUEUE_CAPACITY), - StartTrainedModelDeploymentAction.DEFAULT_QUEUE_CAPACITY - ) - ); + + request.setQueueCapacity(restRequest.paramAsInt(QUEUE_CAPACITY.getPreferredName(), request.getQueueCapacity())); if (restRequest.hasParam(CACHE_SIZE.getPreferredName())) { request.setCacheSize( - validateParameters( - request.getCacheSize(), - ByteSizeValue.parseBytesSizeValue(restRequest.param(CACHE_SIZE.getPreferredName()), CACHE_SIZE.getPreferredName()), - null - ) + ByteSizeValue.parseBytesSizeValue(restRequest.param(CACHE_SIZE.getPreferredName()), CACHE_SIZE.getPreferredName()) ); } else if (defaultCacheSize != null && request.getCacheSize() == null) { request.setCacheSize(defaultCacheSize); } request.setPriority( - validateParameters( - request.getPriority().toString(), - restRequest.param(StartTrainedModelDeploymentAction.TaskParams.PRIORITY.getPreferredName()), - StartTrainedModelDeploymentAction.DEFAULT_PRIORITY.toString() - ) + restRequest.param(StartTrainedModelDeploymentAction.TaskParams.PRIORITY.getPreferredName(), request.getPriority().toString()) ); return channel -> client.execute(StartTrainedModelDeploymentAction.INSTANCE, request, new RestToXContentListener<>(channel)); } - - /** - * This function validates that the body and query parameters don't conflict, and returns the value that should be used. - * When using this function, the body parameter should already have been set to the default value in - * {@link StartTrainedModelDeploymentAction}, or, set to a different value from the rest request. - * - * @param paramDefault (from {@link StartTrainedModelDeploymentAction}) - * @return the parameter to use - * @throws ElasticsearchStatusException if the parameters don't match - */ - private static T validateParameters(@Nullable T bodyParam, @Nullable T queryParam, @Nullable T paramDefault) - throws ElasticsearchStatusException { - if (Objects.equals(bodyParam, paramDefault) && queryParam != null) { - // the body param is the same as the default for this value. 
We cannot tell if this was set intentionally, or if it was just the - // default, thus we will assume it was the default - return queryParam; - } - - if (Objects.equals(bodyParam, queryParam)) { - return bodyParam; - } else if (bodyParam == null) { - return queryParam; - } else if (queryParam == null) { - return bodyParam; - } else { - throw new ElasticsearchStatusException( - "The parameter " + bodyParam + " in the body is different from the parameter " + queryParam + " in the query", - RestStatus.BAD_REQUEST - ); - } - } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentActionTests.java index 7c1f499640e64..b6c450c84d596 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/rest/inference/RestStartTrainedModelDeploymentActionTests.java @@ -7,12 +7,18 @@ package org.elasticsearch.xpack.ml.rest.inference; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.RestActionTestCase; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; @@ -21,81 +27,252 @@ import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Function; +import static java.util.stream.Collectors.toList; +import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.FULLY_ALLOCATED; +import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTING; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class RestStartTrainedModelDeploymentActionTests extends RestActionTestCase { + private final TestCase testCase; - public void testCacheDisabled() { - final boolean disableInferenceProcessCache = true; - controller().registerHandler(new RestStartTrainedModelDeploymentAction(disableInferenceProcessCache)); - SetOnce executeCalled = new SetOnce<>(); - verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(StartTrainedModelDeploymentAction.Request.class)); - - var request = (StartTrainedModelDeploymentAction.Request) actionRequest; - assertThat(request.getCacheSize(), is(ByteSizeValue.ZERO)); + public RestStartTrainedModelDeploymentActionTests(TestCase testCase) { + this.testCase = testCase; + } - executeCalled.set(true); - return createResponse(); - })); + @ParametersFactory(shuffle = false) + public static Iterable parameters() throws Exception { + List testCases = List.of( + // parsing from body only + TestCase.of( + "Parses body timeout field", + false, + (description, request) -> 
assertThat(description, request.getTimeout(), is(TimeValue.timeValueSeconds(4))), + Map.of(), + XContentFactory.jsonBuilder().startObject().field("timeout", "4s").endObject() + ), + TestCase.of( + "Parses body wait_for state field", + false, + (description, request) -> assertThat(description, request.getWaitForState(), is(FULLY_ALLOCATED)), + Map.of(), + XContentFactory.jsonBuilder().startObject().field("wait_for", FULLY_ALLOCATED.toString()).endObject() + ), + TestCase.of( + "Parses body number_of_allocations field", + false, + (description, request) -> assertThat(description, request.getNumberOfAllocations(), is(2)), + Map.of(), + XContentFactory.jsonBuilder().startObject().field("number_of_allocations", "2").endObject() + ), + TestCase.of( + "Parses body threads_per_allocation field", + false, + (description, request) -> assertThat(description, request.getThreadsPerAllocation(), is(2)), + Map.of(), + XContentFactory.jsonBuilder().startObject().field("threads_per_allocation", "2").endObject() + ), + TestCase.of( + "Parses body queue_capacity field", + false, + (description, request) -> assertThat(description, request.getQueueCapacity(), is(2)), + Map.of(), + XContentFactory.jsonBuilder().startObject().field("queue_capacity", "2").endObject() + ), + TestCase.of( + "Parses body cache_size field", + false, + (description, request) -> assertThat(description, request.getCacheSize(), is(ByteSizeValue.ofMb(2))), + Map.of(), + XContentFactory.jsonBuilder().startObject().field("cache_size", "2mb").endObject() + ), + // parsing from query params only + TestCase.of( + "Parses query param timeout field", + false, + (description, request) -> assertThat(description, request.getTimeout(), is(TimeValue.timeValueSeconds(4))), + Map.of("timeout", "4s") + ), + TestCase.of( + "Parses query param wait_for state field", + false, + (description, request) -> assertThat(description, request.getWaitForState(), is(FULLY_ALLOCATED)), + Map.of("wait_for", FULLY_ALLOCATED.toString()) + ), + TestCase.of( + "Parses query param number_of_allocations field", + false, + (description, request) -> assertThat(description, request.getNumberOfAllocations(), is(2)), + Map.of("number_of_allocations", "2") + ), + TestCase.of( + "Parses query param threads_per_allocation field", + false, + (description, request) -> assertThat(description, request.getThreadsPerAllocation(), is(2)), + Map.of("threads_per_allocation", "2") + ), + TestCase.of( + "Parses query param queue_capacity field", + false, + (description, request) -> assertThat(description, request.getQueueCapacity(), is(2)), + Map.of("queue_capacity", "2") + ), + TestCase.of( + "Parses query param cache_size field", + false, + (description, request) -> assertThat(description, request.getCacheSize(), is(ByteSizeValue.ofMb(2))), + Map.of("cache_size", "2mb") + ), + // query params override body + TestCase.of( + "Query param overrides body timeout field", + false, + (description, request) -> assertThat(description, request.getTimeout(), is(TimeValue.timeValueSeconds(4))), + Map.of("timeout", "4s"), + XContentFactory.jsonBuilder().startObject().field("timeout", "2s").endObject() + ), + TestCase.of( + "Query param overrides body wait_for state field", + false, + (description, request) -> assertThat(description, request.getWaitForState(), is(STARTING)), + Map.of("wait_for", STARTING.toString()), + XContentFactory.jsonBuilder().startObject().field("wait_for", FULLY_ALLOCATED.toString()).endObject() + ), + TestCase.of( + "Query param overrides body number_of_allocations field", + 
false, + (description, request) -> assertThat(description, request.getNumberOfAllocations(), is(5)), + Map.of("number_of_allocations", "5"), + XContentFactory.jsonBuilder().startObject().field("number_of_allocations", "2").endObject() + ), + TestCase.of( + "Query param overrides body threads_per_allocation field", + false, + (description, request) -> assertThat(description, request.getThreadsPerAllocation(), is(3)), + Map.of("threads_per_allocation", "3"), + XContentFactory.jsonBuilder().startObject().field("threads_per_allocation", "2").endObject() + ), + TestCase.of( + "Query param overrides body queue_capacity field", + false, + (description, request) -> assertThat(description, request.getQueueCapacity(), is(2)), + Map.of("queue_capacity", "2"), + XContentFactory.jsonBuilder().startObject().field("queue_capacity", "1").endObject() + ), + TestCase.of( + "Query param overrides body cache_size field", + false, + (description, request) -> assertThat(description, request.getCacheSize(), is(ByteSizeValue.ofMb(3))), + Map.of("cache_size", "3mb"), + XContentFactory.jsonBuilder().startObject().field("cache_size", "2mb").endObject() + ), + // cache size tests + TestCase.of( + "Disables cache_size", + true, + (description, request) -> assertThat(description, request.getCacheSize(), is(ByteSizeValue.ZERO)), + Map.of() + ), + TestCase.of( + "Sets cache_size to null", + false, + (description, request) -> assertNull(description, request.getCacheSize()), + Map.of() + ) + ); - RestRequest inferenceRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("_ml/trained_models/test_id/deployment/_start") - .build(); - dispatchRequest(inferenceRequest); - assertThat(executeCalled.get(), equalTo(true)); + return testCases.stream().map(TestCase::toArray).collect(toList()); } - public void testCacheEnabled() { - final boolean disableInferenceProcessCache = false; - controller().registerHandler(new RestStartTrainedModelDeploymentAction(disableInferenceProcessCache)); + /** + * This test is run for each of the supplied {@link TestCase} configurations. 
+ * @throws IOException _ + */ + public void test() throws IOException { + controller().registerHandler(testCase.action); SetOnce executeCalled = new SetOnce<>(); verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { assertThat(actionRequest, instanceOf(StartTrainedModelDeploymentAction.Request.class)); var request = (StartTrainedModelDeploymentAction.Request) actionRequest; - assertNull(request.getCacheSize()); - - executeCalled.set(true); - return createResponse(); - })); + testCase.verifyingAssertFunc.accept(testCase.testDescription, request); - RestRequest inferenceRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("_ml/trained_models/test_id/deployment/_start") - .build(); - dispatchRequest(inferenceRequest); - assertThat(executeCalled.get(), equalTo(true)); - } - - public void testExceptionFromDifferentParamsInQueryAndBody() throws IOException { - SetOnce executeCalled = new SetOnce<>(); - controller().registerHandler(new RestStartTrainedModelDeploymentAction(false)); - verifyingClient.setExecuteVerifier(((actionType, actionRequest) -> { - assertThat(actionRequest, instanceOf(StartTrainedModelDeploymentAction.Request.class)); executeCalled.set(true); return createResponse(); })); - Map paramsMap = new HashMap<>(1); - paramsMap.put("cache_size", "1mb"); - RestRequest inferenceRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) - .withPath("_ml/trained_models/test_id/deployment/_start") - .withParams(paramsMap) - .withContent( - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("cache_size", "2mb").endObject()), - XContentType.JSON - ) - .build(); - dispatchRequest(inferenceRequest); - assertThat(executeCalled.get(), equalTo(null)); // the duplicate parameter should cause an exception, but the exception isn't - // visible here, so we just check that the request failed + dispatchRequest(testCase.buildRequestFunc.apply(xContentRegistry())); + assertThat(testCase.testDescription, executeCalled.get(), equalTo(true)); } private static CreateTrainedModelAssignmentAction.Response createResponse() { return new CreateTrainedModelAssignmentAction.Response(TrainedModelAssignmentTests.randomInstance()); } + + /** + * A single test case + * @param testDescription description of the test + * @param action the rest action specifying whether the cache should be disabled + * @param verifyingAssertFunc an assertion function that will be called after the + * {@link RestStartTrainedModelDeploymentAction#prepareRequest} method is called + * @param buildRequestFunc a function for constructing a fake request + */ + public record TestCase( + String testDescription, + RestStartTrainedModelDeploymentAction action, + BiConsumer verifyingAssertFunc, + Function buildRequestFunc + ) { + private static TestCase of( + String testDescription, + boolean shouldDisableCache, + BiConsumer verifyingAssertFunc, + Map queryParams, + @Nullable XContentBuilder builder + ) { + return new TestCase( + testDescription, + new RestStartTrainedModelDeploymentAction(shouldDisableCache), + verifyingAssertFunc, + buildRequest(queryParams, builder) + ); + } + + private static TestCase of( + String testDescription, + boolean shouldDisableCache, + BiConsumer verifyingAssertFunc, + Map queryParams + ) { + return of(testDescription, shouldDisableCache, verifyingAssertFunc, queryParams, null); + } + + private static Function buildRequest(Map queryParams, XContentBuilder builder) { + Map params = new 
HashMap<>(Map.of("model_id", "model", "deployment_id", "dep")); + params.putAll(queryParams); + + return (registry) -> { + var requestBuilder = new FakeRestRequest.Builder(registry).withMethod(RestRequest.Method.POST) + .withPath("_ml/trained_models/test_id/deployment/_start") + .withParams(params); + + if (builder != null) { + requestBuilder = requestBuilder.withContent(BytesReference.bytes(builder), XContentType.JSON); + } + + return requestBuilder.build(); + }; + } + + Object[] toArray() { + return new Object[] { this }; + } + } + } From 30dd002c5ae09ce2c5fed3e835f4abbd3a3a6c8f Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Thu, 27 Jun 2024 08:20:43 -0500 Subject: [PATCH 014/216] Refactoring TransportSimulateBulkAction to not extend TransportBulkAction (#109889) Co-authored-by: henningandersen --- .../bulk/TransportAbstractBulkAction.java | 346 ++++++++++++++++++ .../action/bulk/TransportBulkAction.java | 270 ++------------ .../bulk/TransportSimulateBulkAction.java | 58 ++- .../TransportAbstractBulkActionTests.java | 37 ++ .../bulk/TransportBulkActionIngestTests.java | 6 +- .../action/bulk/TransportBulkActionTests.java | 34 +- .../TransportSimulateBulkActionTests.java | 43 +-- 7 files changed, 448 insertions(+), 346 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java create mode 100644 server/src/test/java/org/elasticsearch/action/bulk/TransportAbstractBulkActionTests.java diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java new file mode 100644 index 0000000000000..ff306cfb08745 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java @@ -0,0 +1,346 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.bulk; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.ingest.IngestActionForwarder; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.Assertions; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexingPressure; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.ingest.IngestService; +import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.Objects; +import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; +import java.util.function.LongSupplier; + +/** + * This is an abstract base class for bulk actions. It traverses all indices that the request gets routed to, executes all applicable + * pipelines, and then delegates to the concrete implementation of #doInternalExecute to actually index the data. 
+ */ +public abstract class TransportAbstractBulkAction extends HandledTransportAction { + private static final Logger logger = LogManager.getLogger(TransportAbstractBulkAction.class); + + protected final ThreadPool threadPool; + protected final ClusterService clusterService; + protected final IndexingPressure indexingPressure; + protected final SystemIndices systemIndices; + private final IngestService ingestService; + private final IngestActionForwarder ingestForwarder; + protected final LongSupplier relativeTimeProvider; + protected final Executor writeExecutor; + protected final Executor systemWriteExecutor; + private final ActionType bulkAction; + + public TransportAbstractBulkAction( + ActionType action, + TransportService transportService, + ActionFilters actionFilters, + Writeable.Reader requestReader, + ThreadPool threadPool, + ClusterService clusterService, + IngestService ingestService, + IndexingPressure indexingPressure, + SystemIndices systemIndices, + LongSupplier relativeTimeProvider + ) { + super(action.name(), transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); + this.threadPool = threadPool; + this.clusterService = clusterService; + this.ingestService = ingestService; + this.indexingPressure = indexingPressure; + this.systemIndices = systemIndices; + this.writeExecutor = threadPool.executor(ThreadPool.Names.WRITE); + this.systemWriteExecutor = threadPool.executor(ThreadPool.Names.SYSTEM_WRITE); + this.ingestForwarder = new IngestActionForwarder(transportService); + clusterService.addStateApplier(this.ingestForwarder); + this.relativeTimeProvider = relativeTimeProvider; + this.bulkAction = action; + } + + @Override + protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener listener) { + /* + * This is called on the Transport thread so we can check the indexing + * memory pressure *quickly* but we don't want to keep the transport + * thread busy. Then, as soon as we have the indexing pressure in we fork + * to one of the write thread pools. We do this because juggling the + * bulk request can get expensive for a few reasons: + * 1. Figuring out which shard should receive a bulk request might require + * parsing the _source. + * 2. When dispatching the sub-requests to shards we may have to compress + * them. LZ4 is super fast, but slow enough that it's best not to do it + * on the transport thread, especially for large sub-requests. + * + * We *could* detect these cases and only fork in then, but that is complex + * to get right and the fork is fairly low overhead. + */ + final int indexingOps = bulkRequest.numberOfActions(); + final long indexingBytes = bulkRequest.ramBytesUsed(); + final boolean isOnlySystem = TransportBulkAction.isOnlySystem( + bulkRequest, + clusterService.state().metadata().getIndicesLookup(), + systemIndices + ); + final Releasable releasable = indexingPressure.markCoordinatingOperationStarted(indexingOps, indexingBytes, isOnlySystem); + final ActionListener releasingListener = ActionListener.runBefore(listener, releasable::close); + final Executor executor = isOnlySystem ? 
systemWriteExecutor : writeExecutor; + ensureClusterStateThenForkAndExecute(task, bulkRequest, executor, releasingListener); + } + + private void ensureClusterStateThenForkAndExecute( + Task task, + BulkRequest bulkRequest, + Executor executor, + ActionListener releasingListener + ) { + final ClusterState initialState = clusterService.state(); + final ClusterBlockException blockException = initialState.blocks().globalBlockedException(ClusterBlockLevel.WRITE); + if (blockException != null) { + if (false == blockException.retryable()) { + releasingListener.onFailure(blockException); + return; + } + logger.trace("cluster is blocked, waiting for it to recover", blockException); + final ClusterStateObserver clusterStateObserver = new ClusterStateObserver( + initialState, + clusterService, + bulkRequest.timeout(), + logger, + threadPool.getThreadContext() + ); + clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + forkAndExecute(task, bulkRequest, executor, releasingListener); + } + + @Override + public void onClusterServiceClose() { + releasingListener.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + releasingListener.onFailure(blockException); + } + }, newState -> false == newState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE)); + } else { + forkAndExecute(task, bulkRequest, executor, releasingListener); + } + } + + private void forkAndExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener releasingListener) { + executor.execute(new ActionRunnable<>(releasingListener) { + @Override + protected void doRun() { + applyPipelinesAndDoInternalExecute(task, bulkRequest, executor, releasingListener); + } + }); + } + + private boolean applyPipelines(Task task, BulkRequest bulkRequest, Executor executor, ActionListener listener) { + boolean hasIndexRequestsWithPipelines = false; + final Metadata metadata = clusterService.state().getMetadata(); + for (DocWriteRequest actionRequest : bulkRequest.requests) { + IndexRequest indexRequest = getIndexWriteRequest(actionRequest); + if (indexRequest != null) { + IngestService.resolvePipelinesAndUpdateIndexRequest(actionRequest, indexRequest, metadata); + hasIndexRequestsWithPipelines |= IngestService.hasPipeline(indexRequest); + } + + if (actionRequest instanceof IndexRequest ir) { + if (ir.getAutoGeneratedTimestamp() != IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP) { + throw new IllegalArgumentException("autoGeneratedTimestamp should not be set externally"); + } + } + } + + if (hasIndexRequestsWithPipelines) { + // this method (doExecute) will be called again, but with the bulk requests updated from the ingest node processing but + // also with IngestService.NOOP_PIPELINE_NAME on each request. This ensures that this on the second time through this method, + // this path is never taken. 
+ ActionListener.run(listener, l -> { + if (Assertions.ENABLED) { + final boolean arePipelinesResolved = bulkRequest.requests() + .stream() + .map(TransportBulkAction::getIndexWriteRequest) + .filter(Objects::nonNull) + .allMatch(IndexRequest::isPipelineResolved); + assert arePipelinesResolved : bulkRequest; + } + if (clusterService.localNode().isIngestNode()) { + processBulkIndexIngestRequest(task, bulkRequest, executor, metadata, l); + } else { + ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l); + } + }); + return true; + } + return false; + } + + private void processBulkIndexIngestRequest( + Task task, + BulkRequest original, + Executor executor, + Metadata metadata, + ActionListener listener + ) { + final long ingestStartTimeInNanos = System.nanoTime(); + final BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); + getIngestService(original).executeBulkRequest( + original.numberOfActions(), + () -> bulkRequestModifier, + bulkRequestModifier::markItemAsDropped, + (indexName) -> shouldStoreFailure(indexName, metadata, threadPool.absoluteTimeInMillis()), + bulkRequestModifier::markItemForFailureStore, + bulkRequestModifier::markItemAsFailed, + (originalThread, exception) -> { + if (exception != null) { + logger.debug("failed to execute pipeline for a bulk request", exception); + listener.onFailure(exception); + } else { + long ingestTookInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - ingestStartTimeInNanos); + BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); + ActionListener actionListener = bulkRequestModifier.wrapActionListenerIfNeeded( + ingestTookInMillis, + listener + ); + if (bulkRequest.requests().isEmpty()) { + // at this stage, the transport bulk action can't deal with a bulk request with no requests, + // so we stop and send an empty response back to the client. + // (this will happen if pre-processing all items in the bulk failed) + actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0)); + } else { + ActionRunnable runnable = new ActionRunnable<>(actionListener) { + @Override + protected void doRun() { + applyPipelinesAndDoInternalExecute(task, bulkRequest, executor, actionListener); + } + + @Override + public boolean isForceExecution() { + // If we fork back to a write thread we **not** should fail, because tp queue is full. + // (Otherwise the work done during ingest will be lost) + // It is okay to force execution here. Throttling of write requests happens prior to + // ingest when a node receives a bulk request. + return true; + } + }; + // If a processor went async and returned a response on a different thread then + // before we continue the bulk request we should fork back on a write thread: + if (originalThread == Thread.currentThread()) { + runnable.run(); + } else { + executor.execute(runnable); + } + } + } + }, + executor + ); + } + + /** + * Determines if an index name is associated with either an existing data stream or a template + * for one that has the failure store enabled. + * @param indexName The index name to check. + * @param metadata Cluster state metadata. + * @param epochMillis A timestamp to use when resolving date math in the index name. + * @return true if this is not a simulation, and the given index name corresponds to a data stream with a failure store + * or if it matches a template that has a data stream failure store enabled. 
+ */ + protected abstract boolean shouldStoreFailure(String indexName, Metadata metadata, long epochMillis); + + /** + * Retrieves the {@link IndexRequest} from the provided {@link DocWriteRequest} for index or upsert actions. Upserts are + * modeled as {@link IndexRequest} inside the {@link UpdateRequest}. Ignores {@link org.elasticsearch.action.delete.DeleteRequest}'s + * + * @param docWriteRequest The request to find the {@link IndexRequest} + * @return the found {@link IndexRequest} or {@code null} if one can not be found. + */ + public static IndexRequest getIndexWriteRequest(DocWriteRequest docWriteRequest) { + IndexRequest indexRequest = null; + if (docWriteRequest instanceof IndexRequest) { + indexRequest = (IndexRequest) docWriteRequest; + } else if (docWriteRequest instanceof UpdateRequest updateRequest) { + indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest(); + } + return indexRequest; + } + + /* + * This returns the IngestService to be used for the given request. The default implementation ignores the request and always returns + * the same ingestService, but child classes might use information in the request in creating an IngestService specific to that request. + */ + protected IngestService getIngestService(BulkRequest request) { + return ingestService; + } + + protected long relativeTime() { + return relativeTimeProvider.getAsLong(); + } + + protected long buildTookInMillis(long startTimeNanos) { + return TimeUnit.NANOSECONDS.toMillis(relativeTime() - startTimeNanos); + } + + private void applyPipelinesAndDoInternalExecute( + Task task, + BulkRequest bulkRequest, + Executor executor, + ActionListener listener + ) { + final long relativeStartTime = threadPool.relativeTimeInMillis(); + if (applyPipelines(task, bulkRequest, executor, listener) == false) { + doInternalExecute(task, bulkRequest, executor, listener, relativeStartTime); + } + } + + /** + * This method creates any missing resources and actually applies the BulkRequest to the relevant indices + * @param task The task in which this work is being done + * @param bulkRequest The BulkRequest of changes to make to indices + * @param executor The executor for the thread pool in which the work is to be done + * @param listener The listener to be notified of results + * @param relativeStartTimeNanos The relative start time of this bulk load, to be used in computing the time taken for the BulkResponse + */ + protected abstract void doInternalExecute( + Task task, + BulkRequest bulkRequest, + Executor executor, + ActionListener listener, + long relativeStartTimeNanos + ); + +} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index b14a63362cb9f..02a374044f864 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -24,20 +24,14 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.IngestActionForwarder; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.RefCountingRunnable; import 
org.elasticsearch.action.support.WriteResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; -import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateObserver; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -48,10 +42,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.Assertions; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -59,10 +49,8 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.ingest.IngestService; -import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -73,7 +61,6 @@ import java.util.Set; import java.util.SortedMap; import java.util.concurrent.Executor; -import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.function.LongSupplier; @@ -84,29 +71,18 @@ * Groups bulk request items by shard, optionally creating non-existent indices and * delegates to {@link TransportShardBulkAction} for shard-level bulk execution */ -public class TransportBulkAction extends HandledTransportAction { +public class TransportBulkAction extends TransportAbstractBulkAction { public static final String NAME = "indices:data/write/bulk"; public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportBulkAction.class); public static final String LAZY_ROLLOVER_ORIGIN = "lazy_rollover"; - private final ActionType bulkAction; - private final ThreadPool threadPool; - private final ClusterService clusterService; - private final IngestService ingestService; private final FeatureService featureService; - private final LongSupplier relativeTimeProvider; - private final IngestActionForwarder ingestForwarder; private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; - private final IndexingPressure indexingPressure; - private final SystemIndices systemIndices; private final OriginSettingClient rolloverClient; - private final Executor writeExecutor; - private final Executor systemWriteExecutor; - @Inject public TransportBulkAction( ThreadPool threadPool, @@ -180,40 +156,23 @@ public TransportBulkAction( SystemIndices systemIndices, LongSupplier relativeTimeProvider ) { - super(bulkAction.name(), transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super( + bulkAction, + transportService, + actionFilters, + requestReader, + threadPool, + clusterService, + ingestService, + 
+            indexingPressure,
+            systemIndices,
+            relativeTimeProvider
+        );
         Objects.requireNonNull(relativeTimeProvider);
-        this.bulkAction = bulkAction;
-        this.threadPool = threadPool;
-        this.clusterService = clusterService;
-        this.ingestService = ingestService;
         this.featureService = featureService;
-        this.relativeTimeProvider = relativeTimeProvider;
-        this.ingestForwarder = new IngestActionForwarder(transportService);
         this.client = client;
         this.indexNameExpressionResolver = indexNameExpressionResolver;
-        this.indexingPressure = indexingPressure;
-        this.systemIndices = systemIndices;
-        clusterService.addStateApplier(this.ingestForwarder);
         this.rolloverClient = new OriginSettingClient(client, LAZY_ROLLOVER_ORIGIN);
-        this.writeExecutor = threadPool.executor(Names.WRITE);
-        this.systemWriteExecutor = threadPool.executor(Names.SYSTEM_WRITE);
-    }
-
-    /**
-     * Retrieves the {@link IndexRequest} from the provided {@link DocWriteRequest} for index or upsert actions. Upserts are
-     * modeled as {@link IndexRequest} inside the {@link UpdateRequest}. Ignores {@link org.elasticsearch.action.delete.DeleteRequest}'s
-     *
-     * @param docWriteRequest The request to find the {@link IndexRequest}
-     * @return the found {@link IndexRequest} or {@code null} if one can not be found.
-     */
-    public static IndexRequest getIndexWriteRequest(DocWriteRequest<?> docWriteRequest) {
-        IndexRequest indexRequest = null;
-        if (docWriteRequest instanceof IndexRequest) {
-            indexRequest = (IndexRequest) docWriteRequest;
-        } else if (docWriteRequest instanceof UpdateRequest updateRequest) {
-            indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest();
-        }
-        return indexRequest;
     }
 
     public static <T> ActionListener<T> unwrappingSingleItemBulkResponse(
@@ -233,123 +192,13 @@ public static <T> ActionListener<T> unwrappingSingleItemBulkResponse(
     }
 
     @Override
-    protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener<BulkResponse> listener) {
-        /*
-         * This is called on the Transport thread so we can check the indexing
-         * memory pressure *quickly* but we don't want to keep the transport
-         * thread busy. Then, as soon as we have the indexing pressure in we fork
-         * to one of the write thread pools. We do this because juggling the
-         * bulk request can get expensive for a few reasons:
-         * 1. Figuring out which shard should receive a bulk request might require
-         *    parsing the _source.
-         * 2. When dispatching the sub-requests to shards we may have to compress
-         *    them. LZ4 is super fast, but slow enough that it's best not to do it
-         *    on the transport thread, especially for large sub-requests.
-         *
-         * We *could* detect these cases and only fork in then, but that is complex
-         * to get right and the fork is fairly low overhead.
-         */
-        final int indexingOps = bulkRequest.numberOfActions();
-        final long indexingBytes = bulkRequest.ramBytesUsed();
-        final boolean isOnlySystem = isOnlySystem(bulkRequest, clusterService.state().metadata().getIndicesLookup(), systemIndices);
-        final Releasable releasable = indexingPressure.markCoordinatingOperationStarted(indexingOps, indexingBytes, isOnlySystem);
-        final ActionListener<BulkResponse> releasingListener = ActionListener.runBefore(listener, releasable::close);
-        final Executor executor = isOnlySystem ? systemWriteExecutor : writeExecutor;
-        ensureClusterStateThenForkAndExecute(task, bulkRequest, executor, releasingListener);
-    }
-
-    private void ensureClusterStateThenForkAndExecute(
+    protected void doInternalExecute(
         Task task,
         BulkRequest bulkRequest,
         Executor executor,
-        ActionListener<BulkResponse> releasingListener
+        ActionListener<BulkResponse> listener,
+        long relativeStartTime
     ) {
-        final ClusterState initialState = clusterService.state();
-        final ClusterBlockException blockException = initialState.blocks().globalBlockedException(ClusterBlockLevel.WRITE);
-        if (blockException != null) {
-            if (false == blockException.retryable()) {
-                releasingListener.onFailure(blockException);
-                return;
-            }
-            logger.trace("cluster is blocked, waiting for it to recover", blockException);
-            final ClusterStateObserver clusterStateObserver = new ClusterStateObserver(
-                initialState,
-                clusterService,
-                bulkRequest.timeout(),
-                logger,
-                threadPool.getThreadContext()
-            );
-            clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() {
-                @Override
-                public void onNewClusterState(ClusterState state) {
-                    forkAndExecute(task, bulkRequest, executor, releasingListener);
-                }
-
-                @Override
-                public void onClusterServiceClose() {
-                    releasingListener.onFailure(new NodeClosedException(clusterService.localNode()));
-                }
-
-                @Override
-                public void onTimeout(TimeValue timeout) {
-                    releasingListener.onFailure(blockException);
-                }
-            }, newState -> false == newState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.WRITE));
-        } else {
-            forkAndExecute(task, bulkRequest, executor, releasingListener);
-        }
-    }
-
-    private void forkAndExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> releasingListener) {
-        executor.execute(new ActionRunnable<>(releasingListener) {
-            @Override
-            protected void doRun() {
-                doInternalExecute(task, bulkRequest, executor, releasingListener);
-            }
-        });
-    }
-
-    protected void doInternalExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> listener) {
-        final long startTime = relativeTime();
-
-        boolean hasIndexRequestsWithPipelines = false;
-        final Metadata metadata = clusterService.state().getMetadata();
-        for (DocWriteRequest<?> actionRequest : bulkRequest.requests) {
-            IndexRequest indexRequest = getIndexWriteRequest(actionRequest);
-            if (indexRequest != null) {
-                IngestService.resolvePipelinesAndUpdateIndexRequest(actionRequest, indexRequest, metadata);
-                hasIndexRequestsWithPipelines |= IngestService.hasPipeline(indexRequest);
-            }
-
-            if (actionRequest instanceof IndexRequest ir) {
-                if (ir.getAutoGeneratedTimestamp() != IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP) {
-                    throw new IllegalArgumentException("autoGeneratedTimestamp should not be set externally");
-                }
-            }
-        }
-
-        if (hasIndexRequestsWithPipelines) {
-            // this method (doExecute) will be called again, but with the bulk requests updated from the ingest node processing but
-            // also with IngestService.NOOP_PIPELINE_NAME on each request. This ensures that this on the second time through this method,
-            // this path is never taken.
-            ActionListener.run(listener, l -> {
-                if (Assertions.ENABLED) {
-                    final boolean arePipelinesResolved = bulkRequest.requests()
-                        .stream()
-                        .map(TransportBulkAction::getIndexWriteRequest)
-                        .filter(Objects::nonNull)
-                        .allMatch(IndexRequest::isPipelineResolved);
-                    assert arePipelinesResolved : bulkRequest;
-                }
-                if (clusterService.localNode().isIngestNode()) {
-                    processBulkIndexIngestRequest(task, bulkRequest, executor, metadata, l);
-                } else {
-                    ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l);
-                }
-            });
-            return;
-        }
-
         Map<String, Boolean> indicesToAutoCreate = new HashMap<>();
         Set<String> dataStreamsToBeRolledOver = new HashSet<>();
         Set<String> failureStoresToBeRolledOver = new HashSet<>();
@@ -363,7 +212,7 @@ protected void doInternalExecute(
             indicesToAutoCreate,
             dataStreamsToBeRolledOver,
             failureStoresToBeRolledOver,
-            startTime
+            relativeStartTime
         );
     }
 
@@ -568,14 +417,6 @@ private static void failRequestsWhenPrerequisiteActionFailed(
         }
     }
 
-    /*
-     * This returns the IngestService to be used for the given request. The default implementation ignores the request and always returns
-     * the same ingestService, but child classes might use information in the request in creating an IngestService specific to that request.
-     */
-    protected IngestService getIngestService(BulkRequest request) {
-        return ingestService;
-    }
-
     static void prohibitAppendWritesInBackingIndices(DocWriteRequest<?> writeRequest, Metadata metadata) {
         DocWriteRequest.OpType opType = writeRequest.opType();
         if ((opType == OpType.CREATE || opType == OpType.INDEX) == false) {
@@ -677,10 +518,6 @@ private static boolean setResponseFailureIfIndexMatches(
         return false;
     }
 
-    protected long buildTookInMillis(long startTimeNanos) {
-        return TimeUnit.NANOSECONDS.toMillis(relativeTime() - startTimeNanos);
-    }
-
     void executeBulk(
         Task task,
         BulkRequest bulkRequest,
@@ -706,72 +543,6 @@ void executeBulk(
         ).run();
     }
 
-    private long relativeTime() {
-        return relativeTimeProvider.getAsLong();
-    }
-
-    private void processBulkIndexIngestRequest(
-        Task task,
-        BulkRequest original,
-        Executor executor,
-        Metadata metadata,
-        ActionListener<BulkResponse> listener
-    ) {
-        final long ingestStartTimeInNanos = System.nanoTime();
-        final BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original);
-        getIngestService(original).executeBulkRequest(
-            original.numberOfActions(),
-            () -> bulkRequestModifier,
-            bulkRequestModifier::markItemAsDropped,
-            (indexName) -> shouldStoreFailure(indexName, metadata, threadPool.absoluteTimeInMillis()),
-            bulkRequestModifier::markItemForFailureStore,
-            bulkRequestModifier::markItemAsFailed,
-            (originalThread, exception) -> {
-                if (exception != null) {
-                    logger.debug("failed to execute pipeline for a bulk request", exception);
-                    listener.onFailure(exception);
-                } else {
-                    long ingestTookInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - ingestStartTimeInNanos);
-                    BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest();
-                    ActionListener<BulkResponse> actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(
-                        ingestTookInMillis,
-                        listener
-                    );
-                    if (bulkRequest.requests().isEmpty()) {
-                        // at this stage, the transport bulk action can't deal with a bulk request with no requests,
-                        // so we stop and send an empty response back to the client.
-                        // (this will happen if pre-processing all items in the bulk failed)
-                        actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0));
-                    } else {
-                        ActionRunnable<BulkResponse> runnable = new ActionRunnable<>(actionListener) {
-                            @Override
-                            protected void doRun() {
-                                doInternalExecute(task, bulkRequest, executor, actionListener);
-                            }
-
-                            @Override
-                            public boolean isForceExecution() {
-                                // If we fork back to a write thread we **not** should fail, because tp queue is full.
-                                // (Otherwise the work done during ingest will be lost)
-                                // It is okay to force execution here. Throttling of write requests happens prior to
-                                // ingest when a node receives a bulk request.
-                                return true;
-                            }
-                        };
-                        // If a processor went async and returned a response on a different thread then
-                        // before we continue the bulk request we should fork back on a write thread:
-                        if (originalThread == Thread.currentThread()) {
-                            runnable.run();
-                        } else {
-                            executor.execute(runnable);
-                        }
-                    }
-                }
-            },
-            executor
-        );
-    }
-
     /**
      * Determines if an index name is associated with either an existing data stream or a template
      * for one that has the failure store enabled.
@@ -781,13 +552,18 @@ public boolean isForceExecution() {
      * @return true if the given index name corresponds to a data stream with a failure store,
      * or if it matches a template that has a data stream failure store enabled.
     */
-    static boolean shouldStoreFailure(String indexName, Metadata metadata, long epochMillis) {
+    static boolean shouldStoreFailureInternal(String indexName, Metadata metadata, long epochMillis) {
         return DataStream.isFailureStoreFeatureFlagEnabled()
             && resolveFailureStoreFromMetadata(indexName, metadata, epochMillis).or(
                 () -> resolveFailureStoreFromTemplate(indexName, metadata)
             ).orElse(false);
     }
 
+    @Override
+    protected boolean shouldStoreFailure(String indexName, Metadata metadata, long time) {
+        return shouldStoreFailureInternal(indexName, metadata, time);
+    }
+
     /**
     * Determines if an index name is associated with an existing data stream that has a failure store enabled.
     * @param indexName The index name to check.
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
index f0f950ca324bf..95c1c0ce05d89 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
@@ -10,16 +10,13 @@
 
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.ingest.SimulateIndexResponse;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
-import org.elasticsearch.features.FeatureService;
 import org.elasticsearch.index.IndexingPressure;
 import org.elasticsearch.indices.SystemIndices;
 import org.elasticsearch.ingest.IngestService;
@@ -28,60 +25,53 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
-import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.Executor;
 
-public class TransportSimulateBulkAction extends TransportBulkAction {
+/**
+ * This action simulates bulk indexing data. Pipelines are executed for all indices that the request routes to, but no data is actually
+ * indexed and no state is changed. Unlike TransportBulkAction, this does not push the work out to the nodes where the shards live (since
+ * shards are not actually modified).
+ */
+public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
+
     @Inject
     public TransportSimulateBulkAction(
         ThreadPool threadPool,
         TransportService transportService,
         ClusterService clusterService,
         IngestService ingestService,
-        FeatureService featureService,
-        NodeClient client,
         ActionFilters actionFilters,
-        IndexNameExpressionResolver indexNameExpressionResolver,
         IndexingPressure indexingPressure,
         SystemIndices systemIndices
     ) {
         super(
             SimulateBulkAction.INSTANCE,
+            transportService,
+            actionFilters,
            SimulateBulkRequest::new,
             threadPool,
-            transportService,
             clusterService,
             ingestService,
-            featureService,
-            client,
-            actionFilters,
-            indexNameExpressionResolver,
             indexingPressure,
             systemIndices,
             System::nanoTime
         );
     }
 
-    /*
-     * This overrides indexData in TransportBulkAction in order to _not_ actually create any indices or index any data. Instead, each
-     * request gets a corresponding CREATE response, using information from the request.
-     */
     @Override
-    protected void createMissingIndicesAndIndexData(
+    protected void doInternalExecute(
         Task task,
         BulkRequest bulkRequest,
         Executor executor,
         ActionListener<BulkResponse> listener,
-        Map<String, Boolean> indicesToAutoCreate,
-        Set<String> dataStreamsToRollover,
-        Set<String> failureStoresToBeRolledOver,
-        long startTime
+        long relativeStartTime
     ) {
         final AtomicArray<BulkItemResponse> responses = new AtomicArray<>(bulkRequest.requests.size());
         for (int i = 0; i < bulkRequest.requests.size(); i++) {
-            DocWriteRequest<?> request = bulkRequest.requests.get(i);
-            assert request instanceof IndexRequest; // This action is only ever called with IndexRequests
+            DocWriteRequest<?> docRequest = bulkRequest.requests.get(i);
+            assert docRequest instanceof IndexRequest : "TransportSimulateBulkAction should only ever be called with IndexRequests";
+            IndexRequest request = (IndexRequest) docRequest;
+
             responses.set(
                 i,
                 BulkItemResponse.success(
@@ -91,15 +81,17 @@ protected void doInternalExecute(
                     request.id(),
                     request.index(),
                     request.version(),
-                    ((IndexRequest) request).source(),
-                    ((IndexRequest) request).getContentType(),
-                    ((IndexRequest) request).getExecutedPipelines(),
+                    request.source(),
+                    request.getContentType(),
+                    request.getExecutedPipelines(),
                     null
                 )
             );
         }
-        listener.onResponse(new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTime)));
+        listener.onResponse(
+            new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(relativeStartTime))
+        );
     }
 
     /*
@@ -111,4 +103,10 @@ protected IngestService getIngestService(BulkRequest request) {
         IngestService rawIngestService = super.getIngestService(request);
         return new SimulateIngestService(rawIngestService, request);
     }
+
+    @Override
+    protected boolean shouldStoreFailure(String indexName, Metadata metadata, long time) {
+        // A simulate bulk request should not change any persistent state in the system, so we never write to the failure store
+        return false;
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportAbstractBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportAbstractBulkActionTests.java
new file mode 100644
index 0000000000000..4ce0aa6a0c6c2
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportAbstractBulkActionTests.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.action.bulk;
+
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+
+public class TransportAbstractBulkActionTests extends ESTestCase {
+
+    public void testGetIndexWriteRequest() {
+        IndexRequest indexRequest = new IndexRequest("index").id("id1").source(Collections.emptyMap());
+        UpdateRequest upsertRequest = new UpdateRequest("index", "id1").upsert(indexRequest).script(mockScript("1"));
+        UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "id2").doc(indexRequest).docAsUpsert(true);
+        UpdateRequest scriptedUpsert = new UpdateRequest("index", "id2").upsert(indexRequest).script(mockScript("1")).scriptedUpsert(true);
+
+        assertEquals(TransportAbstractBulkAction.getIndexWriteRequest(indexRequest), indexRequest);
+        assertEquals(TransportAbstractBulkAction.getIndexWriteRequest(upsertRequest), indexRequest);
+        assertEquals(TransportAbstractBulkAction.getIndexWriteRequest(docAsUpsertRequest), indexRequest);
+        assertEquals(TransportAbstractBulkAction.getIndexWriteRequest(scriptedUpsert), indexRequest);
+
+        DeleteRequest deleteRequest = new DeleteRequest("index", "id");
+        assertNull(TransportAbstractBulkAction.getIndexWriteRequest(deleteRequest));
+
+        UpdateRequest badUpsertRequest = new UpdateRequest("index", "id1");
+        assertNull(TransportAbstractBulkAction.getIndexWriteRequest(badUpsertRequest));
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
index d7adf3aa8b4e2..3683c2c271739 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
@@ -146,12 +146,12 @@ class TestTransportBulkAction extends TransportBulkAction {
 
         TestTransportBulkAction() {
             super(
-                threadPool,
+                TransportBulkActionIngestTests.this.threadPool,
                 transportService,
-                clusterService,
+                TransportBulkActionIngestTests.this.clusterService,
                 ingestService,
                 mockFeatureService,
-                new NodeClient(Settings.EMPTY, threadPool),
+                new NodeClient(Settings.EMPTY, TransportBulkActionIngestTests.this.threadPool),
                 new ActionFilters(Collections.emptySet()),
                 TestIndexNameExpressionResolver.newInstance(),
                 new IndexingPressure(SETTINGS),
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
index 1a34b1e856a5e..776174b7cf502 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java
@@ -93,7 +93,7 @@ class TestTransportBulkAction extends TransportBulkAction {
             super(
                 TransportBulkActionTests.this.threadPool,
                 transportService,
-                clusterService,
+                TransportBulkActionTests.this.clusterService,
                 null,
                 mockFeatureService,
                 new NodeClient(Settings.EMPTY, TransportBulkActionTests.this.threadPool),
@@ -189,24 +189,6 @@ public void testDeleteNonExistingDocExternalGteVersionCreatesIndex() throws Exce
         assertTrue(bulkAction.indexCreated);
     }
 
-    public void testGetIndexWriteRequest() throws Exception {
-        IndexRequest indexRequest = new IndexRequest("index").id("id1").source(Collections.emptyMap());
-        UpdateRequest upsertRequest = new UpdateRequest("index", "id1").upsert(indexRequest).script(mockScript("1"));
-        UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "id2").doc(indexRequest).docAsUpsert(true);
-        UpdateRequest scriptedUpsert = new UpdateRequest("index", "id2").upsert(indexRequest).script(mockScript("1")).scriptedUpsert(true);
-
-        assertEquals(TransportBulkAction.getIndexWriteRequest(indexRequest), indexRequest);
-        assertEquals(TransportBulkAction.getIndexWriteRequest(upsertRequest), indexRequest);
-        assertEquals(TransportBulkAction.getIndexWriteRequest(docAsUpsertRequest), indexRequest);
-        assertEquals(TransportBulkAction.getIndexWriteRequest(scriptedUpsert), indexRequest);
-
-        DeleteRequest deleteRequest = new DeleteRequest("index", "id");
-        assertNull(TransportBulkAction.getIndexWriteRequest(deleteRequest));
-
-        UpdateRequest badUpsertRequest = new UpdateRequest("index", "id1");
-        assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest));
-    }
-
     public void testProhibitAppendWritesInBackingIndices() throws Exception {
         String dataStreamName = "logs-foobar";
         ClusterState clusterState = createDataStream(dataStreamName);
@@ -415,13 +397,13 @@ public void testResolveFailureStoreFromMetadata() throws Exception {
             .build();
 
         // Data stream with failure store should store failures
-        assertThat(TransportBulkAction.shouldStoreFailure(dataStreamWithFailureStore, metadata, testTime), is(true));
+        assertThat(TransportBulkAction.shouldStoreFailureInternal(dataStreamWithFailureStore, metadata, testTime), is(true));
         // Data stream without failure store should not
-        assertThat(TransportBulkAction.shouldStoreFailure(dataStreamWithoutFailureStore, metadata, testTime), is(false));
+        assertThat(TransportBulkAction.shouldStoreFailureInternal(dataStreamWithoutFailureStore, metadata, testTime), is(false));
         // An index should not be considered for failure storage
-        assertThat(TransportBulkAction.shouldStoreFailure(backingIndex1.getIndex().getName(), metadata, testTime), is(false));
+        assertThat(TransportBulkAction.shouldStoreFailureInternal(backingIndex1.getIndex().getName(), metadata, testTime), is(false));
         // even if that index is itself a failure store
-        assertThat(TransportBulkAction.shouldStoreFailure(failureStoreIndex1.getIndex().getName(), metadata, testTime), is(false));
+        assertThat(TransportBulkAction.shouldStoreFailureInternal(failureStoreIndex1.getIndex().getName(), metadata, testTime), is(false));
     }
 
     public void testResolveFailureStoreFromTemplate() throws Exception {
@@ -452,11 +434,11 @@ public void testResolveFailureStoreFromTemplate() throws Exception {
             .build();
 
         // Data stream with failure store should store failures
-        assertThat(TransportBulkAction.shouldStoreFailure(dsTemplateWithFailureStore + "-1", metadata, testTime), is(true));
+        assertThat(TransportBulkAction.shouldStoreFailureInternal(dsTemplateWithFailureStore + "-1", metadata, testTime), is(true));
         // Data stream without failure store should not
-        assertThat(TransportBulkAction.shouldStoreFailure(dsTemplateWithoutFailureStore + "-1", metadata, testTime), is(false));
+        assertThat(TransportBulkAction.shouldStoreFailureInternal(dsTemplateWithoutFailureStore + "-1", metadata, testTime), is(false));
         // An index template should not be considered for failure storage
-        assertThat(TransportBulkAction.shouldStoreFailure(indexTemplate + "-1", metadata, testTime), is(false));
+        assertThat(TransportBulkAction.shouldStoreFailureInternal(indexTemplate + "-1", metadata, testTime), is(false));
     }
 
     private BulkRequest buildBulkRequest(List<String> indices) {
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
index 590029f8537f7..7313cb3277100 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
@@ -8,15 +8,11 @@
 
 package org.elasticsearch.action.bulk;
 
-import org.elasticsearch.ResourceAlreadyExistsException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
-import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.ingest.SimulateIndexResponse;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.client.internal.node.NodeClient;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
 import org.elasticsearch.cluster.service.ClusterService;
@@ -63,37 +59,17 @@ public class TransportSimulateBulkActionTests extends ESTestCase {
 
     class TestTransportSimulateBulkAction extends TransportSimulateBulkAction {
 
-        volatile boolean failIndexCreation = false;
-        boolean indexCreated = false; // set when the "real" index is created
-        Runnable beforeIndexCreation = null;
-
         TestTransportSimulateBulkAction() {
             super(
                 TransportSimulateBulkActionTests.this.threadPool,
                 transportService,
-                clusterService,
-                null,
+                TransportSimulateBulkActionTests.this.clusterService,
                 null,
-                new NodeClient(Settings.EMPTY, TransportSimulateBulkActionTests.this.threadPool),
-                new ActionFilters(Collections.emptySet()),
-                new TransportBulkActionTookTests.Resolver(),
+                new ActionFilters(Set.of()),
                 new IndexingPressure(Settings.EMPTY),
                 EmptySystemIndices.INSTANCE
             );
         }
-
-        @Override
-        void createIndex(CreateIndexRequest createIndexRequest, ActionListener<CreateIndexResponse> listener) {
-            indexCreated = true;
-            if (beforeIndexCreation != null) {
-                beforeIndexCreation.run();
-            }
-            if (failIndexCreation) {
-                listener.onFailure(new ResourceAlreadyExistsException("index already exists"));
-            } else {
-                listener.onResponse(null);
-            }
-        }
     }
 
     @Before
@@ -192,20 +168,7 @@ public void onFailure(Exception e) {
                 fail(e, "Unexpected error");
             }
         };
-        Map<String, Boolean> indicesToAutoCreate = Map.of(); // unused
-        Set<String> dataStreamsToRollover = Set.of(); // unused
-        Set<String> failureStoresToRollover = Set.of(); // unused
-        long startTime = 0;
-        bulkAction.createMissingIndicesAndIndexData(
-            task,
-            bulkRequest,
-            r -> fail("executor is unused"),
-            listener,
-            indicesToAutoCreate,
-            dataStreamsToRollover,
-            failureStoresToRollover,
-            startTime
-        );
+        bulkAction.doInternalExecute(task, bulkRequest, r -> fail("executor is unused"), listener, randomLongBetween(0, Long.MAX_VALUE));
        assertThat(onResponseCalled.get(), equalTo(true));
    }
 }
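The patch above is a template-method split: TransportAbstractBulkAction owns the shared pre-processing (pipeline resolution, forking to a write thread pool, timing), and each subclass supplies only the terminal step via doInternalExecute and shouldStoreFailure. A minimal self-contained Java sketch of that shape follows; plain strings stand in for the real request and response types, so this illustrates the pattern rather than the actual Elasticsearch API.

    // Sketch of the template-method pattern introduced by TransportAbstractBulkAction.
    // "String" stands in for BulkRequest; the pipeline/fork logic is elided.
    abstract class AbstractBulkSketch {
        final void execute(String bulkRequest) {
            long relativeStartTime = System.nanoTime();
            // ... resolve ingest pipelines and fork to a write thread pool here ...
            doInternalExecute(bulkRequest, relativeStartTime);
        }

        // Subclasses decide how the (possibly pipeline-processed) request is applied.
        protected abstract void doInternalExecute(String bulkRequest, long relativeStartTimeNanos);

        // Subclasses decide whether failed documents may be redirected to a failure store.
        protected abstract boolean shouldStoreFailure(String indexName, long epochMillis);
    }

    class RealBulkSketch extends AbstractBulkSketch {
        @Override
        protected void doInternalExecute(String bulkRequest, long relativeStartTimeNanos) {
            // TransportBulkAction: create missing indices, roll over data streams, index the data.
        }

        @Override
        protected boolean shouldStoreFailure(String indexName, long epochMillis) {
            return true; // the real class resolves this from data stream metadata and templates
        }
    }

    class SimulateBulkSketch extends AbstractBulkSketch {
        @Override
        protected void doInternalExecute(String bulkRequest, long relativeStartTimeNanos) {
            // TransportSimulateBulkAction: build simulated responses, change no state.
        }

        @Override
        protected boolean shouldStoreFailure(String indexName, long epochMillis) {
            return false; // a simulation never writes to the failure store
        }
    }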
From cb9219399796b5d0a99e19b28e6a11bcac951ded Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com>
Date: Thu, 27 Jun 2024 16:13:20 +0200
Subject: [PATCH 015/216] Fix condition that checks for new security index
 (#110228)

When we create a new index, we create it using the version from
`IndexVersion.min(IndexVersion.current(), clusterState.nodes().getMaxDataNodeCompatibleIndexVersion())`,
which means the created index version is compatible with all nodes in the
cluster. When we check whether an index was created on the latest version,
however, we don't want to include `getMaxDataNodeCompatibleIndexVersion`:
we need to make sure that the index contains the latest migration, and if
it doesn't, we want to run the migration.

In the BWC tests, `getMaxDataNodeCompatibleIndexVersion` returns the index
version from `7.X` as the "latest" version (which is wrong) and therefore
skips the migration; instead, the migration should only be skipped on the
very latest version.
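The essence of the fix, as a self-contained sketch: a stub version type stands in for Elasticsearch's IndexVersion here (the numeric ids are hypothetical, for illustration only), showing why capping the comparison at the oldest data node's compatible version wrongly skipped the migration.

    // Hedged sketch of the check before and after this patch.
    record IndexVersionStub(int id) {
        boolean onOrAfter(IndexVersionStub other) {
            return id >= other.id;
        }

        static IndexVersionStub min(IndexVersionStub a, IndexVersionStub b) {
            return a.id <= b.id ? a : b;
        }
    }

    class CreatedOnLatestVersionSketch {
        static final IndexVersionStub CURRENT = new IndexVersionStub(8_500_000);

        // Before: the oldest data node's compatible version capped the comparison, so an
        // index created on an old version could still count as "created on latest".
        static boolean before(IndexVersionStub created, IndexVersionStub maxDataNodeCompatible) {
            return created.onOrAfter(IndexVersionStub.min(CURRENT, maxDataNodeCompatible));
        }

        // After: only an index created on the actual current version counts as latest,
        // so the security index migration runs everywhere else.
        static boolean after(IndexVersionStub created) {
            return created.onOrAfter(CURRENT);
        }

        public static void main(String[] args) {
            IndexVersionStub sevenX = new IndexVersionStub(7_170_099);
            // BWC cluster: index created on 7.x, oldest node also 7.x-compatible.
            System.out.println(before(sevenX, sevenX)); // true  -> migration wrongly skipped
            System.out.println(after(sevenX));          // false -> migration runs
        }
    }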
---
 .../xpack/security/support/SecurityIndexManager.java | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java
index 1796b43b0726f..9c15356d1433d 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java
@@ -249,14 +249,11 @@ private SystemIndexDescriptor.MappingsVersion getMinSecurityIndexMappingVersion(
     /**
      * Check if the index was created on the latest index version available in the cluster
      */
-    private static boolean isCreatedOnLatestVersion(IndexMetadata indexMetadata, ClusterState clusterState) {
+    private static boolean isCreatedOnLatestVersion(IndexMetadata indexMetadata) {
         final IndexVersion indexVersionCreated = indexMetadata != null
             ? SETTING_INDEX_VERSION_CREATED.get(indexMetadata.getSettings())
             : null;
-        return indexVersionCreated != null
-            && indexVersionCreated.onOrAfter(
-                IndexVersion.min(IndexVersion.current(), clusterState.nodes().getMaxDataNodeCompatibleIndexVersion())
-            );
+        return indexVersionCreated != null && indexVersionCreated.onOrAfter(IndexVersion.current());
     }
 
     @Override
@@ -269,7 +266,7 @@ public void clusterChanged(ClusterChangedEvent event) {
         }
         final State previousState = state;
         final IndexMetadata indexMetadata = resolveConcreteIndex(systemIndexDescriptor.getAliasName(), event.state().metadata());
-        final boolean createdOnLatestVersion = isCreatedOnLatestVersion(indexMetadata, event.state());
+        final boolean createdOnLatestVersion = isCreatedOnLatestVersion(indexMetadata);
         final Instant creationTime = indexMetadata != null ? Instant.ofEpochMilli(indexMetadata.getCreationDate()) : null;
         final boolean isIndexUpToDate = indexMetadata == null
             || INDEX_FORMAT_SETTING.get(indexMetadata.getSettings()) == systemIndexDescriptor.getIndexFormat();

From ac2da6071c49e602270153c91627e6fe0e51f048 Mon Sep 17 00:00:00 2001
From: Ignacio Vera
Date: Thu, 27 Jun 2024 16:57:30 +0200
Subject: [PATCH 016/216] Make sure object is mutated in
 InternalCardinalityTests (#110223)

---
 .../aggregations/metrics/InternalCardinalityTests.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java
index 0e615da36d7e4..74340ab5f1bf9 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java
@@ -57,7 +57,7 @@ private InternalCardinality createTestInstance(String name, Map<String, Object> metadata) {
             1
         );
         algos.add(hllpp);
-        int values = between(0, 1000);
+        int values = between(20, 1000);
         for (int i = 0; i < values; i++) {
             hllpp.collect(0, BitMixer.mix64(randomInt()));
         }
@@ -99,7 +99,8 @@ protected InternalCardinality mutateInstance(InternalCardinality instance) {
                 new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()),
                 0
             );
-            for (int i = 0; i < 10; i++) {
+            int values = between(0, 10);
+            for (int i = 0; i < values; i++) {
                newState.collect(0, BitMixer.mix64(randomIntBetween(500, 10000)));
            }
            algos.add(newState);
"org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109318" - method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling\ - \ (too complex pattern)}" + method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling (too complex pattern)}" - class: "org.elasticsearch.xpack.ml.integration.ClassificationHousePricingIT" issue: "https://github.com/elastic/elasticsearch/issues/101598" method: "testFeatureImportanceValues" @@ -88,6 +85,9 @@ tests: - class: "org.elasticsearch.xpack.security.authz.store.NativePrivilegeStoreCacheTests" issue: "https://github.com/elastic/elasticsearch/issues/110227" method: "testGetPrivilegesUsesCache" +- class: org.elasticsearch.upgrades.SecurityIndexRolesMetadataMigrationIT + method: testMetadataMigratedAfterUpgrade + issue: https://github.com/elastic/elasticsearch/issues/110232 # Examples: # From d6380ed84edeb3a0b11a73c7478f5923d56116fc Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 27 Jun 2024 17:31:28 +0200 Subject: [PATCH 018/216] ESQL: Fix Join references (#109989) * Make Join.references() return all the references to attributes in children used for matching the left and right hand sides. * Do not use leftAttribute == rightAttribute expressions to express how to match with a table in LOOKUP; these expressions get optimized away sometimes. This requires a breaking change to Join's serialization. --- docs/changelog/109989.yaml | 5 + .../xpack/esql/core/tree/Node.java | 9 ++ .../src/main/resources/lookup.csv-spec | 54 +++++----- .../xpack/esql/action/EsqlCapabilities.java | 3 +- .../esql/optimizer/PhysicalPlanOptimizer.java | 6 +- .../xpack/esql/plan/logical/Lookup.java | 12 +-- .../xpack/esql/plan/logical/join/Join.java | 28 ++++- .../esql/plan/logical/join/JoinConfig.java | 20 ++-- .../esql/plan/physical/HashJoinExec.java | 44 ++++---- .../esql/planner/LocalExecutionPlanner.java | 15 ++- .../xpack/esql/planner/Mapper.java | 27 +++-- .../optimizer/LogicalPlanOptimizerTests.java | 24 +++-- .../xpack/esql/plan/logical/JoinTests.java | 101 ++++++++++++++++++ .../esql/tree/EsqlNodeSubclassTests.java | 3 +- 14 files changed, 245 insertions(+), 106 deletions(-) create mode 100644 docs/changelog/109989.yaml create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java diff --git a/docs/changelog/109989.yaml b/docs/changelog/109989.yaml new file mode 100644 index 0000000000000..f1f5972b60eb3 --- /dev/null +++ b/docs/changelog/109989.yaml @@ -0,0 +1,5 @@ +pr: 109989 +summary: "ESQL: Fix Join references" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java index f7561d0c2b34b..f42d454ef00bd 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java @@ -262,6 +262,15 @@ protected final T transformNodeProps(Class typeToken, Function + * Normally, you want to use one of the static {@code create} methods to implement this. + *
---
 docs/changelog/109989.yaml                    |   5 +
 .../xpack/esql/core/tree/Node.java            |   9 ++
 .../src/main/resources/lookup.csv-spec        |  54 +++++-----
 .../xpack/esql/action/EsqlCapabilities.java   |   3 +-
 .../esql/optimizer/PhysicalPlanOptimizer.java |   6 +-
 .../xpack/esql/plan/logical/Lookup.java       |  12 +--
 .../xpack/esql/plan/logical/join/Join.java    |  28 ++++-
 .../esql/plan/logical/join/JoinConfig.java    |  20 ++--
 .../esql/plan/physical/HashJoinExec.java      |  44 ++++----
 .../esql/planner/LocalExecutionPlanner.java   |  15 ++-
 .../xpack/esql/planner/Mapper.java            |  27 +++--
 .../optimizer/LogicalPlanOptimizerTests.java  |  24 +++--
 .../xpack/esql/plan/logical/JoinTests.java    | 101 ++++++++++++++++++
 .../esql/tree/EsqlNodeSubclassTests.java      |   3 +-
 14 files changed, 245 insertions(+), 106 deletions(-)
 create mode 100644 docs/changelog/109989.yaml
 create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java

diff --git a/docs/changelog/109989.yaml b/docs/changelog/109989.yaml
new file mode 100644
index 0000000000000..f1f5972b60eb3
--- /dev/null
+++ b/docs/changelog/109989.yaml
@@ -0,0 +1,5 @@
+pr: 109989
+summary: "ESQL: Fix Join references"
+area: ES|QL
+type: bug
+issues: []
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java
index f7561d0c2b34b..f42d454ef00bd 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java
@@ -262,6 +262,15 @@ protected final <E> T transformNodeProps(Class<E> typeToken, Function<? super E, ? extends E> rule) {
+     * <p>
+     * Normally, you want to use one of the static {@code create} methods to implement this.
+     * <p>
+     * For {@link org.elasticsearch.xpack.esql.core.plan.QueryPlan}s, it is very important that
+     * the properties contain all of the expressions and references relevant to this node, and
+     * that all of the properties are used in the provided constructor; otherwise query plan
+     * transformations like
+     * {@link org.elasticsearch.xpack.esql.core.plan.QueryPlan#transformExpressionsOnly(Function)}
+     * will not have an effect.
      */
     protected abstract NodeInfo<? extends T> info();
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec
index 77d8e48d9e81f..b021beafb8614 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec
@@ -1,5 +1,5 @@
 keywordByInt
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | SORT emp_no
 | LIMIT 4
@@ -17,7 +17,7 @@ emp_no:integer | languages:integer | lang_name:keyword
 ;
 
 keywordByMvInt
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW int=[1, 2, 3]
 | LOOKUP int_number_names ON int
 ;
@@ -27,7 +27,7 @@ int:integer | name:keyword
 ;
 
 keywordByDupeInt
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW int=[1, 1, 1]
 | LOOKUP int_number_names ON int
 ;
@@ -37,7 +37,7 @@ int:integer | name:keyword
 ;
 
 intByKeyword
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW name="two"
 | LOOKUP int_number_names ON name
 ;
@@ -48,7 +48,7 @@ name:keyword | int:integer
 
 
 keywordByLong
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | SORT emp_no
 | LIMIT 4
@@ -66,7 +66,7 @@ emp_no:integer | languages:long | lang_name:keyword
 ;
 
 longByKeyword
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW name="two"
 | LOOKUP long_number_names ON name
 ;
@@ -76,7 +76,7 @@ name:keyword | long:long
 ;
 
 keywordByFloat
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | SORT emp_no
 | LIMIT 4
@@ -94,7 +94,7 @@ emp_no:integer | height:double | height_name:keyword
 ;
 
 floatByKeyword
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW name="two point zero eight"
 | LOOKUP double_number_names ON name
 ;
@@ -104,7 +104,7 @@ two point zero eight | 2.08
 ;
 
 floatByNullMissing
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW name=null
 | LOOKUP double_number_names ON name
 ;
@@ -114,7 +114,7 @@ name:null | double:double
 ;
 
 floatByNullMatching
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW name=null
 | LOOKUP double_number_names_with_null ON name
 ;
@@ -124,7 +124,7 @@ name:null | double:double
 ;
 
 intIntByKeywordKeyword
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW aa="foo", ab="zoo"
 | LOOKUP big ON aa, ab
 ;
@@ -134,7 +134,7 @@ foo | zoo | 1 | -1
 ;
 
 intIntByKeywordKeywordMissing
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW aa="foo", ab="zoi"
 | LOOKUP big ON aa, ab
 ;
@@ -144,7 +144,7 @@ foo | zoi | null | null
 ;
 
 intIntByKeywordKeywordSameValues
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW aa="foo", ab="foo"
 | LOOKUP big ON aa, ab
 ;
@@ -154,7 +154,7 @@ foo | foo | 2 | -2
 ;
 
 intIntByKeywordKeywordSameValuesMissing
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW aa="bar", ab="bar"
 | LOOKUP big ON aa, ab
 ;
@@ -164,7 +164,7 @@ bar | bar | null | null
 ;
 
 lookupBeforeStats
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | RENAME languages AS int
 | LOOKUP int_number_names ON int
@@ -182,7 +182,7 @@ height:double | languages:keyword
 ;
 
 lookupAfterStats
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | STATS int=TO_INT(AVG(height))
 | LOOKUP int_number_names ON int
@@ -194,7 +194,7 @@ two
 
 // Makes sure the LOOKUP squashes previous names
 doesNotDuplicateNames
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | SORT emp_no
 | LIMIT 4
@@ -213,7 +213,7 @@ emp_no:integer | languages:long | name:keyword
 ;
 
 lookupBeforeSort
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | WHERE emp_no < 10005
 | RENAME languages AS int
@@ -231,7 +231,7 @@ languages:keyword | emp_no:integer
 ;
 
 lookupAfterSort
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | WHERE emp_no < 10005
 | SORT languages ASC, emp_no ASC
@@ -249,7 +249,7 @@ languages:keyword | emp_no:integer
 ;
 
 shadowing
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | KEEP emp_no
 | WHERE emp_no == 10001
@@ -262,7 +262,7 @@ emp_no:integer | left:keyword | int:integer | right:keyword | name:keyword
 ;
 
 shadowingMulti
-required_capability: lookup_v3
+required_capability: lookup_v4
 FROM employees
 | KEEP emp_no
 | WHERE emp_no == 10001
@@ -279,7 +279,7 @@ emp_no:integer | left:keyword | nb:integer | middle:keyword | aa:keyword | right
 // named "lookup"
 //
 
 rowNamedLookup
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW lookup = "a"
 ;
@@ -288,7 +288,7 @@ lookup:keyword
 ;
 
 rowNamedLOOKUP
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW LOOKUP = "a"
 ;
@@ -297,7 +297,7 @@ LOOKUP:keyword
 ;
 
 evalNamedLookup
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW a = "a"
 | EVAL lookup = CONCAT(a, "1")
 ;
@@ -306,7 +306,7 @@ a:keyword | lookup:keyword
 ;
 
 dissectNamedLookup
-required_capability: lookup_v3
+required_capability: lookup_v4
 row a = "foo bar" | dissect a "foo %{lookup}";
 
 a:keyword | lookup:keyword
@@ -314,7 +314,7 @@ a:keyword | lookup:keyword
 ;
 
 renameIntoLookup
-required_capability: lookup_v3
+required_capability: lookup_v4
 row a = "foo bar" | RENAME a AS lookup;
 
 lookup:keyword
@@ -322,7 +322,7 @@ lookup:keyword
 ;
 
 sortOnLookup
-required_capability: lookup_v3
+required_capability: lookup_v4
 ROW lookup = "a"
 | SORT lookup
 ;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
index ecbe25227616b..12b54126fbabb 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java
@@ -63,8 +63,9 @@ public enum Cap {
          * LOOKUP command with
          * - tables using syntax {@code "tables": {"type": []}}
          * - fixed variable shadowing
+         * - fixed Join.references(), requiring breaking change to Join serialization
          */
-        LOOKUP_V3(true),
+        LOOKUP_V4(true),
 
         /**
         * Support for requesting the "REPEAT" command.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java
index a0a3874a2c2de..70c2a9007408a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.core.common.Failure;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
+import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.AttributeMap;
 import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -21,7 +22,6 @@
 import org.elasticsearch.xpack.esql.core.rule.RuleExecutor;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.util.Holder;
-import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
@@ -121,8 +121,8 @@ public PhysicalPlan apply(PhysicalPlan plan) {
             }
             if (p instanceof HashJoinExec join) {
                 attributes.removeAll(join.addedFields());
-                for (Equals cond : join.conditions()) {
-                    attributes.remove(cond.right());
+                for (Attribute rhs : join.rightFields()) {
+                    attributes.remove(rhs);
                 }
             }
             if (p instanceof EnrichExec ee) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java
index 36bff408e3199..9f44fe49fb80a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java
@@ -11,12 +11,10 @@
 import org.elasticsearch.xpack.esql.core.capabilities.Resolvables;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
-import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
 import org.elasticsearch.xpack.esql.plan.logical.join.Join;
@@ -90,17 +88,19 @@ public LocalRelation localRelation() {
     }
 
     public JoinConfig joinConfig() {
-        List<Expression> conditions = new ArrayList<>(matchFields.size());
+        List<Attribute> leftFields = new ArrayList<>(matchFields.size());
+        List<Attribute> rightFields = new ArrayList<>(matchFields.size());
         List<Attribute> rhsOutput = Join.makeReference(localRelation.output());
-        for (NamedExpression lhs : matchFields) {
+        for (Attribute lhs : matchFields) {
             for (Attribute rhs : rhsOutput) {
                 if (lhs.name().equals(rhs.name())) {
-                    conditions.add(new Equals(source(), lhs, rhs));
+                    leftFields.add(lhs);
+                    rightFields.add(rhs);
                     break;
                 }
             }
         }
-        return new JoinConfig(JoinType.LEFT, matchFields, conditions);
+        return new JoinConfig(JoinType.LEFT, matchFields, leftFields, rightFields);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
index 57a52ad1a1cf8..d6d328686d8f1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java
@@ -31,8 +31,6 @@ public class Join extends BinaryPlan {
 
     private final JoinConfig config;
-    // TODO: The matching attributes from the left and right logical plans should become part of the `expressions()`
-    // so that `references()` returns the attributes we actually rely on.
     private List<Attribute> lazyOutput;
 
     public Join(Source source, LogicalPlan left, LogicalPlan right, JoinConfig config) {
@@ -40,6 +38,19 @@ public Join(Source source, LogicalPlan left, LogicalPlan right, JoinConfig confi
         this.config = config;
     }
 
+    public Join(
+        Source source,
+        LogicalPlan left,
+        LogicalPlan right,
+        JoinType type,
+        List<Attribute> matchFields,
+        List<Attribute> leftFields,
+        List<Attribute> rightFields
+    ) {
+        super(source, left, right);
+        this.config = new JoinConfig(type, matchFields, leftFields, rightFields);
+    }
+
     public Join(PlanStreamInput in) throws IOException {
         super(Source.readFrom(in), in.readLogicalPlanNode(), in.readLogicalPlanNode());
         this.config = new JoinConfig(in);
@@ -58,7 +69,18 @@ public JoinConfig config() {
 
     @Override
     protected NodeInfo<Join> info() {
-        return NodeInfo.create(this, Join::new, left(), right(), config);
+        // Do not just add the JoinConfig as a whole - this would prevent correctly registering the
+        // expressions and references.
+        return NodeInfo.create(
+            this,
+            Join::new,
+            left(),
+            right(),
+            config.type(),
+            config.matchFields(),
+            config.leftFields(),
+            config.rightFields()
+        );
     }
 
     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java
index 6b603709b3972..68ad50f2f67a0 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java
@@ -12,24 +12,25 @@
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.xpack.esql.core.capabilities.Resolvables;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
-import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
-import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
 
 import java.io.IOException;
 import java.util.List;
 
 /**
  * Configuration for a {@code JOIN} style operation.
- * @param matchFields fields that are merged from the left and right relations
- * @param conditions when these conditions are true the rows are joined
+ * @param matchFields fields either from the left or right fields which decide which side is kept
+ * @param leftFields matched with the right fields
+ * @param rightFields matched with the left fields
 */
-public record JoinConfig(JoinType type, List<Attribute> matchFields, List<Expression> conditions) implements Writeable {
+public record JoinConfig(JoinType type, List<Attribute> matchFields, List<Attribute> leftFields, List<Attribute> rightFields)
+    implements
+        Writeable {
     public JoinConfig(StreamInput in) throws IOException {
         this(
             JoinType.readFrom(in),
             in.readNamedWriteableCollectionAsList(Attribute.class),
-            in.readCollectionAsList(i -> ((PlanStreamInput) i).readExpression())
+            in.readNamedWriteableCollectionAsList(Attribute.class),
+            in.readNamedWriteableCollectionAsList(Attribute.class)
         );
     }
 
@@ -37,10 +38,11 @@ public JoinConfig(StreamInput in) throws IOException {
     public void writeTo(StreamOutput out) throws IOException {
         type.writeTo(out);
         out.writeNamedWriteableCollection(matchFields);
-        out.writeCollection(conditions, (o, v) -> ((PlanStreamOutput) o).writeExpression(v));
+        out.writeNamedWriteableCollection(leftFields);
+        out.writeNamedWriteableCollection(rightFields);
     }
 
     public boolean expressionsResolved() {
-        return Resolvables.resolved(matchFields) && Resolvables.resolved(conditions);
+        return Resolvables.resolved(matchFields) && Resolvables.resolved(leftFields) && Resolvables.resolved(rightFields);
     }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java
index 29cf079f317be..0415a5cbb9132 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java
@@ -11,8 +11,6 @@
 import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
-import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals;
-import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
 
@@ -24,12 +22,8 @@ public class HashJoinExec extends UnaryExec implements EstimatesRowSize {
     private final LocalSourceExec joinData;
     private final List<Attribute> matchFields;
-    /**
-     * Conditions that must match for rows to be joined. The {@link Equals#left()}
-     * is always from the child and the {@link Equals#right()} is always from the
-     * {@link #joinData()}.
- */ - private final List conditions; + private final List leftFields; + private final List rightFields; private final List output; private AttributeSet lazyAddedFields; @@ -38,13 +32,15 @@ public HashJoinExec( PhysicalPlan child, LocalSourceExec hashData, List matchFields, - List conditions, + List leftFields, + List rightFields, List output ) { super(source, child); this.joinData = hashData; this.matchFields = matchFields; - this.conditions = conditions; + this.leftFields = leftFields; + this.rightFields = rightFields; this.output = output; } @@ -52,7 +48,8 @@ public HashJoinExec(PlanStreamInput in) throws IOException { super(Source.readFrom(in), in.readPhysicalPlanNode()); this.joinData = new LocalSourceExec(in); this.matchFields = in.readNamedWriteableCollectionAsList(Attribute.class); - this.conditions = in.readCollectionAsList(i -> (Equals) EsqlBinaryComparison.readFrom(in)); + this.leftFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.rightFields = in.readNamedWriteableCollectionAsList(Attribute.class); this.output = in.readNamedWriteableCollectionAsList(Attribute.class); } @@ -61,7 +58,8 @@ public void writeTo(PlanStreamOutput out) throws IOException { out.writePhysicalPlanNode(child()); joinData.writeTo(out); out.writeNamedWriteableCollection(matchFields); - out.writeCollection(conditions, (o, v) -> v.writeTo(o)); + out.writeNamedWriteableCollection(leftFields); + out.writeNamedWriteableCollection(rightFields); out.writeNamedWriteableCollection(output); } @@ -73,13 +71,12 @@ public List matchFields() { return matchFields; } - /** - * Conditions that must match for rows to be joined. The {@link Equals#left()} - * is always from the child and the {@link Equals#right()} is always from the - * {@link #joinData()}. - */ - public List conditions() { - return conditions; + public List leftFields() { + return leftFields; + } + + public List rightFields() { + return rightFields; } public Set addedFields() { @@ -103,12 +100,12 @@ public List output() { @Override public HashJoinExec replaceChild(PhysicalPlan newChild) { - return new HashJoinExec(source(), newChild, joinData, matchFields, conditions, output); + return new HashJoinExec(source(), newChild, joinData, matchFields, leftFields, rightFields, output); } @Override protected NodeInfo info() { - return NodeInfo.create(this, HashJoinExec::new, child(), joinData, matchFields, conditions, output); + return NodeInfo.create(this, HashJoinExec::new, child(), joinData, matchFields, leftFields, rightFields, output); } @Override @@ -125,12 +122,13 @@ public boolean equals(Object o) { HashJoinExec hash = (HashJoinExec) o; return joinData.equals(hash.joinData) && matchFields.equals(hash.matchFields) - && conditions.equals(hash.conditions) + && leftFields.equals(hash.leftFields) + && rightFields.equals(hash.rightFields) && output.equals(hash.output); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), joinData, matchFields, conditions, output); + return Objects.hash(super.hashCode(), joinData, matchFields, leftFields, rightFields, output); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index b5700d38c2d10..84c18ed63bb4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -63,7 
+63,6 @@ import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -501,21 +500,21 @@ private PhysicalOperation planHashJoin(HashJoinExec join, LocalExecutionPlannerC Layout layout = layoutBuilder.build(); Block[] localData = join.joinData().supplier().get(); - RowInTableLookupOperator.Key[] keys = new RowInTableLookupOperator.Key[join.conditions().size()]; - int[] blockMapping = new int[join.conditions().size()]; - for (int k = 0; k < join.conditions().size(); k++) { - Equals cond = join.conditions().get(k); + RowInTableLookupOperator.Key[] keys = new RowInTableLookupOperator.Key[join.leftFields().size()]; + int[] blockMapping = new int[join.leftFields().size()]; + for (int k = 0; k < join.leftFields().size(); k++) { + Attribute left = join.leftFields().get(k); + Attribute right = join.rightFields().get(k); Block localField = null; for (int l = 0; l < join.joinData().output().size(); l++) { - if (join.joinData().output().get(l).name().equals((((NamedExpression) cond.right()).name()))) { + if (join.joinData().output().get(l).name().equals((((NamedExpression) right).name()))) { localField = localData[l]; } } if (localField == null) { - throw new IllegalArgumentException("can't find local data for [" + cond.right() + "]"); + throw new IllegalArgumentException("can't find local data for [" + right + "]"); } - NamedExpression left = (NamedExpression) cond.left(); keys[k] = new RowInTableLookupOperator.Key(left.name(), localField); Layout.ChannelAndType input = source.layout.get(left.id()); blockMapping[k] = input.channel(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 9518954f78c64..7cd2bf5729ca7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; import org.elasticsearch.xpack.esql.core.plan.logical.BinaryPlan; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; @@ -17,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -29,6 +27,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import 
org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; @@ -53,9 +52,6 @@ import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; -import java.util.ArrayList; -import java.util.List; - import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.PARTIAL; @@ -278,19 +274,20 @@ private PhysicalPlan map(BinaryPlan p, PhysicalPlan lhs, PhysicalPlan rhs) { } private PhysicalPlan tryHashJoin(Join join, PhysicalPlan lhs, PhysicalPlan rhs) { - if (join.config().type() != JoinType.LEFT) { + JoinConfig config = join.config(); + if (config.type() != JoinType.LEFT) { return null; } - List conditions = new ArrayList<>(join.config().conditions().size()); - for (Expression cond : join.config().conditions()) { - if (cond instanceof Equals eq) { - conditions.add(eq); - } else { - return null; - } - } if (rhs instanceof LocalSourceExec local) { - return new HashJoinExec(join.source(), lhs, local, join.config().matchFields(), conditions, join.output()); + return new HashJoinExec( + join.source(), + lhs, + local, + config.matchFields(), + config.leftFields(), + config.rightFields(), + join.output() + ); } return null; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 3ce778d038875..9611924e9a6bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -5029,12 +5029,14 @@ public void testLookupSimple() { assertThat(join.config().type(), equalTo(JoinType.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); - assertThat(join.config().conditions().size(), equalTo(1)); - Equals eq = as(join.config().conditions().get(0), Equals.class); - assertThat(eq.left().toString(), startsWith("int{r}")); - assertThat(eq.right().toString(), startsWith("int{r}")); - assertTrue(join.children().get(0).outputSet() + " contains " + eq.left(), join.children().get(0).outputSet().contains(eq.left())); - assertTrue(join.children().get(1).outputSet() + " contains " + eq.right(), join.children().get(1).outputSet().contains(eq.right())); + assertThat(join.config().leftFields().size(), equalTo(1)); + assertThat(join.config().rightFields().size(), equalTo(1)); + Attribute lhs = join.config().leftFields().get(0); + Attribute rhs = join.config().rightFields().get(0); + assertThat(lhs.toString(), startsWith("int{r}")); + assertThat(rhs.toString(), startsWith("int{r}")); + assertTrue(join.children().get(0).outputSet() + " contains " + lhs, join.children().get(0).outputSet().contains(lhs)); + assertTrue(join.children().get(1).outputSet() + " contains " + rhs, join.children().get(1).outputSet().contains(rhs)); // Join's output looks sensible too assertMap( @@ -5109,10 +5111,12 @@ public void testLookupStats() { assertThat(join.config().type(), equalTo(JoinType.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), 
matchesList().item(startsWith("int{r}"))); - assertThat(join.config().conditions().size(), equalTo(1)); - Equals eq = as(join.config().conditions().get(0), Equals.class); - assertThat(eq.left().toString(), startsWith("int{r}")); - assertThat(eq.right().toString(), startsWith("int{r}")); + assertThat(join.config().leftFields().size(), equalTo(1)); + assertThat(join.config().rightFields().size(), equalTo(1)); + Attribute lhs = join.config().leftFields().get(0); + Attribute rhs = join.config().rightFields().get(0); + assertThat(lhs.toString(), startsWith("int{r}")); + assertThat(rhs.toString(), startsWith("int{r}")); // Join's output looks sensible too assertMap( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java new file mode 100644 index 0000000000000..91f25e6f83579 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.core.expression.Alias; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +public class JoinTests extends ESTestCase { + public void testExpressionsAndReferences() { + int numMatchFields = between(1, 10); + + List<Attribute> matchFields = new ArrayList<>(numMatchFields); + List<Alias> leftFields = new ArrayList<>(numMatchFields); + List<Attribute> leftAttributes = new ArrayList<>(numMatchFields); + List<Alias> rightFields = new ArrayList<>(numMatchFields); + List<Attribute> rightAttributes = new ArrayList<>(numMatchFields); + + for (int i = 0; i < numMatchFields; i++) { + Alias left = aliasForLiteral("left" + i); + Alias right = aliasForLiteral("right" + i); + + leftFields.add(left); + leftAttributes.add(left.toAttribute()); + rightFields.add(right); + rightAttributes.add(right.toAttribute()); + matchFields.add(randomBoolean() ? left.toAttribute() : right.toAttribute()); + } + + Row left = new Row(Source.EMPTY, leftFields); + Row right = new Row(Source.EMPTY, rightFields); + + JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + Join join = new Join(Source.EMPTY, left, right, joinConfig); + + // matchfields are a subset of the left and right fields, so they don't contribute to the size of the references set.
+ assertEquals(2 * numMatchFields, join.references().size()); + + AttributeSet refs = join.references(); + assertTrue(refs.containsAll(matchFields)); + assertTrue(refs.containsAll(leftAttributes)); + assertTrue(refs.containsAll(rightAttributes)); + + Set<Expression> exprs = Set.copyOf(join.expressions()); + assertTrue(exprs.containsAll(matchFields)); + assertTrue(exprs.containsAll(leftAttributes)); + assertTrue(exprs.containsAll(rightAttributes)); + } + + public void testTransformExprs() { + int numMatchFields = between(1, 10); + + List<Attribute> matchFields = new ArrayList<>(numMatchFields); + List<Alias> leftFields = new ArrayList<>(numMatchFields); + List<Attribute> leftAttributes = new ArrayList<>(numMatchFields); + List<Alias> rightFields = new ArrayList<>(numMatchFields); + List<Attribute> rightAttributes = new ArrayList<>(numMatchFields); + + for (int i = 0; i < numMatchFields; i++) { + Alias left = aliasForLiteral("left" + i); + Alias right = aliasForLiteral("right" + i); + + leftFields.add(left); + leftAttributes.add(left.toAttribute()); + rightFields.add(right); + rightAttributes.add(right.toAttribute()); + matchFields.add(randomBoolean() ? left.toAttribute() : right.toAttribute()); + } + + Row left = new Row(Source.EMPTY, leftFields); + Row right = new Row(Source.EMPTY, rightFields); + + JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + Join join = new Join(Source.EMPTY, left, right, joinConfig); + assertTrue(join.config().matchFields().stream().allMatch(ref -> ref.dataType().equals(DataType.INTEGER))); + + Join transformedJoin = (Join) join.transformExpressionsOnly(Attribute.class, attr -> attr.withDataType(DataType.BOOLEAN)); + assertTrue(transformedJoin.config().matchFields().stream().allMatch(ref -> ref.dataType().equals(DataType.BOOLEAN))); + } + + private static Alias aliasForLiteral(String name) { + return new Alias(Source.EMPTY, name, new Literal(Source.EMPTY, 1, DataType.INTEGER)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index c14245d212cf0..6d84a295889c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -90,7 +90,8 @@ protected Object pluggableMakeArg(Class> toBuildClass, Class (Attribute) makeArg(Attribute.class)), - randomList(0, 10, () -> (Expression) makeArg(Expression.class)) + randomList(0, 10, () -> (Attribute) makeArg(Attribute.class)), + randomList(0, 10, () -> (Attribute) makeArg(Attribute.class)) ); } From 4a49c08f3417cae17ababbc4134bf8e49be66e98 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 27 Jun 2024 17:49:20 +0200 Subject: [PATCH 019/216] ESQL: Skip ENRICH csv tests for multi shadowing pre 8.14.0 (#110226) They use an enrich policy with geo data that doesn't work on older nodes.
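For context: csv-spec tests are gated per version through an annotation appended to the test name, where a range such as `-8.13.99` skips the test on every release up to and including 8.13.99. A minimal sketch of the annotation shape used by the change below (the test name and body here are illustrative, not from this patch):

    myEnrichTest#[skip:-8.13.99, reason:ENRICH extended in 8.14.0]
    ROW city = "Zürich"
    | ENRICH city_names ON city WITH airport
    ;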
--- .../qa/testFixtures/src/main/resources/enrich.csv-spec | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index bc79d1c62bd67..fc8c48afdf8cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -88,9 +88,9 @@ ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right" left:keyword | env:keyword | right:keyword | client_ip:keyword ; -shadowingMulti +shadowingMulti#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] required_capability: enrich_load -ROW left = "left", airport = "Zurich Airport ZRH", city = "Zürich", middle = "middle", region = "North-East Switzerland", right = "right" +ROW left = "left", airport = "Zurich Airport ZRH", city = "Zürich", middle = "middle", region = "North-East Switzerland", right = "right" | ENRICH city_names ON city WITH airport, region, city_boundary ; @@ -98,8 +98,8 @@ left:keyword | city:keyword | middle:keyword | right:keyword | airport:text | re left | Zürich | middle | right | Zurich Int'l | Bezirk Zürich | "POLYGON((8.448 47.3802,8.4977 47.3452,8.5032 47.3202,8.6254 47.3547,8.5832 47.3883,8.5973 47.4063,8.5431 47.4329,8.4858 47.431,8.4691 47.4169,8.473 47.3951,8.448 47.3802))" ; -shadowingMultiLimit0 -ROW left = "left", airport = "Zurich Airport ZRH", city = "Zurich", middle = "middle", region = "North-East Switzerland", right = "right" +shadowingMultiLimit0#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +ROW left = "left", airport = "Zurich Airport ZRH", city = "Zürich", middle = "middle", region = "North-East Switzerland", right = "right" | ENRICH city_names ON city WITH airport, region, city_boundary | LIMIT 0 ; From 736357a9fb0bb68dbf87f108cc331402cdfb7951 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Thu, 27 Jun 2024 10:11:26 -0700 Subject: [PATCH 020/216] Handle ignore_above in synthetic source for flattened fields (#110214) --- docs/changelog/110214.yaml | 5 + .../test/get/100_synthetic_source.yml | 8 +- .../index/mapper/DocumentParserContext.java | 2 +- .../flattened/FlattenedFieldMapper.java | 6 +- .../flattened/FlattenedFieldParser.java | 102 ++++++++++++++---- ...ortedSetDocValuesSyntheticFieldLoader.java | 28 +++-- .../flattened/FlattenedFieldMapperTests.java | 9 +- 7 files changed, 118 insertions(+), 42 deletions(-) create mode 100644 docs/changelog/110214.yaml diff --git a/docs/changelog/110214.yaml b/docs/changelog/110214.yaml new file mode 100644 index 0000000000000..20f61cac64454 --- /dev/null +++ b/docs/changelog/110214.yaml @@ -0,0 +1,5 @@ +pr: 110214 +summary: Handle `ignore_above` in synthetic source for flattened fields +area: Mapping +type: enhancement +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml index b2b9e1b90cb3b..55605849de69c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/100_synthetic_source.yml @@ -1062,8 +1062,8 @@ flattened field no doc values: --- flattened field with ignore_above: - requires: - cluster_features: ["gte_v8.8.0"] - reason: support for synthetic source on flattened fields added in 8.8.0 + cluster_features: 
["mapper.track_ignored_source"] + reason: requires tracking ignored source - do: indices.create: @@ -1105,6 +1105,10 @@ flattened field with ignore_above: field: key1: key2: "key2" + key3: "key3_ignored" + key4: "key4_ignored" + key5: + key6: "key6_ignored" key7: "key7" - is_false: fields diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index a0c78c727d854..3afadffe2f0ca 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -325,7 +325,7 @@ final boolean getClonedSource() { return clonedSource; } - final boolean canAddIgnoredField() { + public final boolean canAddIgnoredField() { return mappingLookup.isSourceSynthetic() && clonedSource == false; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 85407fe824275..3887c5a15cd5a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -737,8 +737,8 @@ public void validateMatchedRoutingPath(final String routingPath) { private final FlattenedFieldParser fieldParser; private final Builder builder; - private FlattenedFieldMapper(String simpleName, MappedFieldType mappedFieldType, Builder builder) { - super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty()); + private FlattenedFieldMapper(String leafName, MappedFieldType mappedFieldType, Builder builder) { + super(leafName, mappedFieldType, MultiFields.empty(), CopyTo.empty()); this.builder = builder; this.fieldParser = new FlattenedFieldParser( mappedFieldType.name(), @@ -819,7 +819,7 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { return SourceLoader.SyntheticFieldLoader.NOTHING; } if (fieldType().hasDocValues()) { - return new FlattenedSortedSetDocValuesSyntheticFieldLoader(fullPath() + "._keyed", leafName()); + return new FlattenedSortedSetDocValuesSyntheticFieldLoader(fullPath(), fullPath() + "._keyed", leafName()); } throw new IllegalArgumentException( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParser.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParser.java index d373d683b73ad..1e3d55c01d74c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldParser.java @@ -17,7 +17,11 @@ import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentParserContext; +import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.XContentDataHelper; +import org.elasticsearch.xcontent.CopyingXContentParser; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -32,8 +36,8 @@ class FlattenedFieldParser { static final String SEPARATOR = "\0"; private static final byte SEPARATOR_BYTE = '\0'; - private final String rootFieldName; - private final String keyedFieldName; + 
private final String rootFieldFullPath; + private final String keyedFieldFullPath; private final MappedFieldType fieldType; private final int depthLimit; @@ -41,35 +45,55 @@ class FlattenedFieldParser { private final String nullValue; FlattenedFieldParser( - String rootFieldName, - String keyedFieldName, + String rootFieldFullPath, + String keyedFieldFullPath, MappedFieldType fieldType, int depthLimit, int ignoreAbove, String nullValue ) { - this.rootFieldName = rootFieldName; - this.keyedFieldName = keyedFieldName; + this.rootFieldFullPath = rootFieldFullPath; + this.keyedFieldFullPath = keyedFieldFullPath; this.fieldType = fieldType; this.depthLimit = depthLimit; this.ignoreAbove = ignoreAbove; this.nullValue = nullValue; } - public List parse(final DocumentParserContext context) throws IOException { - XContentParser parser = context.parser(); + public List parse(final DocumentParserContext documentParserContext) throws IOException { + XContentParser parser = documentParserContext.parser(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + XContentBuilder rawDataForSyntheticSource = null; + if (documentParserContext.canAddIgnoredField() && ignoreAbove < Integer.MAX_VALUE) { + var copyingParser = new CopyingXContentParser(parser); + rawDataForSyntheticSource = copyingParser.getBuilder(); + parser = copyingParser; + } + ContentPath path = new ContentPath(); List fields = new ArrayList<>(); + var context = new Context(parser, documentParserContext); parseObject(context, path, fields); + if (rawDataForSyntheticSource != null && context.isIgnoredValueEncountered()) { + // One or more inner fields are ignored due to `ignore_above`. + // Because of that we will store whole object as is in order to generate synthetic source. 
+ documentParserContext.addIgnoredField( + IgnoredSourceFieldMapper.NameValue.fromContext( + documentParserContext, + rootFieldFullPath, + XContentDataHelper.encodeXContentBuilder(rawDataForSyntheticSource) + ) + ); + } + return fields; } - private void parseObject(DocumentParserContext context, ContentPath path, List fields) throws IOException { + private void parseObject(Context context, ContentPath path, List fields) throws IOException { String currentName = null; - XContentParser parser = context.parser(); + XContentParser parser = context.getParser(); while (true) { XContentParser.Token token = parser.nextToken(); if (token == XContentParser.Token.END_OBJECT) { @@ -85,9 +109,8 @@ private void parseObject(DocumentParserContext context, ContentPath path, List fields) - throws IOException { - XContentParser parser = context.parser(); + private void parseArray(Context context, ContentPath path, String currentName, List fields) throws IOException { + XContentParser parser = context.getParser(); while (true) { XContentParser.Token token = parser.nextToken(); if (token == XContentParser.Token.END_ARRAY) { @@ -98,13 +121,13 @@ private void parseArray(DocumentParserContext context, ContentPath path, String } private void parseFieldValue( - DocumentParserContext context, + Context context, XContentParser.Token token, ContentPath path, String currentName, List fields ) throws IOException { - XContentParser parser = context.parser(); + XContentParser parser = context.getParser(); if (token == XContentParser.Token.START_OBJECT) { path.add(currentName); validateDepthLimit(path); @@ -126,8 +149,9 @@ private void parseFieldValue( } } - private void addField(DocumentParserContext context, ContentPath path, String currentName, String value, List fields) { + private void addField(Context context, ContentPath path, String currentName, String value, List fields) { if (value.length() > ignoreAbove) { + context.onIgnoredValue(); return; } @@ -143,7 +167,7 @@ private void addField(DocumentParserContext context, ContentPath path, String cu // in that case we can already throw a more user friendly exception here which includes the offending fields key and value lengths if (bytesKeyedValue.length > IndexWriter.MAX_TERM_LENGTH) { String msg = "Flattened field [" - + rootFieldName + + rootFieldFullPath + "] contains one immense field" + " whose keyed encoding is longer than the allowed max length of " + IndexWriter.MAX_TERM_LENGTH @@ -158,13 +182,13 @@ private void addField(DocumentParserContext context, ContentPath path, String cu } BytesRef bytesValue = new BytesRef(value); if (fieldType.isIndexed()) { - fields.add(new StringField(rootFieldName, bytesValue, Field.Store.NO)); - fields.add(new StringField(keyedFieldName, bytesKeyedValue, Field.Store.NO)); + fields.add(new StringField(rootFieldFullPath, bytesValue, Field.Store.NO)); + fields.add(new StringField(keyedFieldFullPath, bytesKeyedValue, Field.Store.NO)); } if (fieldType.hasDocValues()) { - fields.add(new SortedSetDocValuesField(rootFieldName, bytesValue)); - fields.add(new SortedSetDocValuesField(keyedFieldName, bytesKeyedValue)); + fields.add(new SortedSetDocValuesField(rootFieldFullPath, bytesValue)); + fields.add(new SortedSetDocValuesField(keyedFieldFullPath, bytesKeyedValue)); if (fieldType.isDimension() == false) { return; @@ -173,7 +197,10 @@ private void addField(DocumentParserContext context, ContentPath path, String cu final String keyedFieldName = FlattenedFieldParser.extractKey(bytesKeyedValue).utf8ToString(); if (fieldType.isDimension() && 
fieldType.dimensions().contains(keyedFieldName)) { final BytesRef keyedFieldValue = FlattenedFieldParser.extractValue(bytesKeyedValue); - context.getDimensions().addString(rootFieldName + "." + keyedFieldName, keyedFieldValue).validate(context.indexSettings()); + context.getDocumentParserContext() + .getDimensions() + .addString(rootFieldFullPath + "." + keyedFieldName, keyedFieldValue) + .validate(context.getDocumentParserContext().indexSettings()); } } } @@ -181,7 +208,7 @@ private void addField(DocumentParserContext context, ContentPath path, String cu private void validateDepthLimit(ContentPath path) { if (path.length() + 1 > depthLimit) { throw new IllegalArgumentException( - "The provided [flattened] field [" + rootFieldName + "] exceeds the maximum depth limit of [" + depthLimit + "]." + "The provided [flattened] field [" + rootFieldFullPath + "] exceeds the maximum depth limit of [" + depthLimit + "]." ); } } @@ -210,4 +237,33 @@ static BytesRef extractValue(BytesRef keyedValue) { int valueStart = keyedValue.offset + length + 1; return new BytesRef(keyedValue.bytes, valueStart, keyedValue.length - valueStart); } + + private static class Context { + private final XContentParser parser; + private final DocumentParserContext documentParserContext; + + private boolean ignoredValueEncountered; + + private Context(XContentParser parser, DocumentParserContext documentParserContext) { + this.parser = parser; + this.documentParserContext = documentParserContext; + this.ignoredValueEncountered = false; + } + + public XContentParser getParser() { + return parser; + } + + public DocumentParserContext getDocumentParserContext() { + return documentParserContext; + } + + public void onIgnoredValue() { + this.ignoredValueEncountered = true; + } + + public boolean isIgnoredValueEncountered() { + return ignoredValueEncountered; + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedSortedSetDocValuesSyntheticFieldLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedSortedSetDocValuesSyntheticFieldLoader.java index 3eb8f4ac52eaa..959460758ab30 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedSortedSetDocValuesSyntheticFieldLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedSortedSetDocValuesSyntheticFieldLoader.java @@ -19,24 +19,32 @@ public class FlattenedSortedSetDocValuesSyntheticFieldLoader extends SortedSetDocValuesSyntheticFieldLoader { private DocValuesFieldValues docValues = NO_VALUES; - private final String name; - private final String simpleName; + private final String fieldFullPath; + private final String keyedFieldFullPath; + private final String leafName; /** * Build a loader for flattened fields from doc values. 
* - * @param name the name of the field to load from doc values - * @param simpleName the name to give the field in the rendered {@code _source} + * @param fieldFullPath full path to the original field + * @param keyedFieldFullPath full path to the keyed field to load doc values from + * @param leafName the name of the leaf field to use in the rendered {@code _source} */ - public FlattenedSortedSetDocValuesSyntheticFieldLoader(String name, String simpleName) { - super(name, simpleName, null, false); - this.name = name; - this.simpleName = simpleName; + public FlattenedSortedSetDocValuesSyntheticFieldLoader(String fieldFullPath, String keyedFieldFullPath, String leafName) { + super(fieldFullPath, leafName, null, false); + this.fieldFullPath = fieldFullPath; + this.keyedFieldFullPath = keyedFieldFullPath; + this.leafName = leafName; + } + + @Override + public String fieldName() { + return fieldFullPath; } @Override public DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) throws IOException { - final SortedSetDocValues dv = DocValues.getSortedSet(reader, name); + final SortedSetDocValues dv = DocValues.getSortedSet(reader, keyedFieldFullPath); if (dv.getValueCount() == 0) { docValues = NO_VALUES; return null; @@ -56,7 +64,7 @@ public void write(XContentBuilder b) throws IOException { if (docValues.count() == 0) { return; } - b.startObject(simpleName); + b.startObject(leafName); docValues.write(b); b.endObject(); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java index 4f23c86f53cca..aba20ec5d81c8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java @@ -698,12 +698,12 @@ private static void randomMapExample(final TreeMap<String, Object> example, int for (int i = 0; i < randomIntBetween(2, 5); i++) { int j = depth >= maxDepth ? randomIntBetween(1, 2) : randomIntBetween(1, 3); switch (j) { - case 1 -> example.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); + case 1 -> example.put(randomAlphaOfLength(10), randomAlphaOfLengthBetween(1, 10)); case 2 -> { int size = randomIntBetween(2, 10); final Set<String> stringSet = new HashSet<>(); while (stringSet.size() < size) { - stringSet.add(String.valueOf(randomIntBetween(10_000, 20_000))); + stringSet.add(String.valueOf(randomIntBetween(10_000, 2_000_000))); } final List<String> randomList = new ArrayList<>(stringSet); Collections.sort(randomList); @@ -720,10 +720,10 @@ private static void randomMapExample(final TreeMap<String, Object> example, int } private static class FlattenedFieldSyntheticSourceSupport implements SyntheticSourceSupport { + private final Integer ignoreAbove = randomBoolean() ? randomIntBetween(4, 10) : null; @Override public SyntheticSourceExample example(int maxValues) throws IOException { - // NOTE: values must be keywords and we use a TreeMap to preserve order (doc values are sorted and the result // is created with keys and nested keys in sorted order).
final TreeMap<String, Object> map = new TreeMap<>(); @@ -743,6 +743,9 @@ public List<SyntheticSourceInvalidExample> invalidExample() throws IOException { private void mapping(XContentBuilder b) throws IOException { b.field("type", "flattened"); + if (ignoreAbove != null) { + b.field("ignore_above", ignoreAbove); + } } } From 9c599dfacc84143239a2c11dc66bd3cf18d3976a Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Thu, 27 Jun 2024 10:12:45 -0700 Subject: [PATCH 021/216] Add system property to allow `index.refresh_interval` to be less than 5s (#110172) * Add system property to allow `index.refresh_interval` to be less than 5s * bypass forbidden api check * follow comments * fix visibility * lazy init * spotless * Use LazyInitializable --- .../elasticsearch/index/IndexSettings.java | 31 +++++++---- .../index/IndexSettingsOverrideTests.java | 55 +++++++++++++++++++ 2 files changed, 76 insertions(+), 10 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 5446027a2ca40..5d864e4fa1e24 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.LazyInitializable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.ingest.IngestService; @@ -294,6 +295,13 @@ public final class IndexSettings { }, new RefreshIntervalValidator(), Property.Dynamic, Property.IndexScope, Property.ServerlessPublic); static class RefreshIntervalValidator implements Setting.Validator<TimeValue> { + + static final String STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE = "es.stateless.allow.index.refresh_interval.override"; + + private LazyInitializable<Boolean, RuntimeException> isOverrideAllowed = new LazyInitializable<>( + () -> Boolean.parseBoolean(System.getProperty(STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE, "false")) + ); + @Override public void validate(TimeValue value) {} @@ -308,16 +316,19 @@ public void validate(final TimeValue value, final Map<Setting<?>, Object> settin && value.compareTo(TimeValue.ZERO) > 0 && value.compareTo(STATELESS_MIN_NON_FAST_REFRESH_INTERVAL) < 0 && indexVersion.after(IndexVersions.V_8_10_0)) { - throw new IllegalArgumentException( - "index setting [" - + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey() - + "=" - + value - + "] should be either " - + TimeValue.MINUS_ONE - + " or equal to or greater than " - + STATELESS_MIN_NON_FAST_REFRESH_INTERVAL - ); + + if (isOverrideAllowed.getOrCompute() == false) { + throw new IllegalArgumentException( + "index setting [" + + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey() + + "=" + + value + + "] should be either " + + TimeValue.MINUS_ONE + + " or equal to or greater than " + + STATELESS_MIN_NON_FAST_REFRESH_INTERVAL + ); + } } } diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java b/server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java new file mode 100644 index 0000000000000..e4f87805f2c4f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index; + +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_VERSION_CREATED; +import static org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING; + +@ESTestCase.WithoutSecurityManager +@SuppressForbidden(reason = "manipulates system properties for testing") +public class IndexSettingsOverrideTests extends ESTestCase { + + public static IndexMetadata newIndexMeta(String name, Settings indexSettings) { + return IndexMetadata.builder(name) + .settings(indexSettings(IndexVersion.current(), randomIntBetween(1, 3), randomIntBetween(1, 3)).put(indexSettings)) + .build(); + } + + @BeforeClass + public static void setSystemProperty() { + System.setProperty(IndexSettings.RefreshIntervalValidator.STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE, "true"); + } + + public void testStatelessMinRefreshIntervalOverride() { + IndexMetadata metadata = newIndexMeta( + "index", + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(EXISTING_SHARDS_ALLOCATOR_SETTING.getKey(), "stateless") + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s") + .put(SETTING_INDEX_VERSION_CREATED.getKey(), IndexVersions.V_8_10_0.id() + 1) + .build() + ); + IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); + assertEquals(TimeValue.timeValueSeconds(1), settings.getRefreshInterval()); + } + + @AfterClass + public static void clearSystemProperty() { + System.clearProperty(IndexSettings.RefreshIntervalValidator.STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE); + } +} From 19fc0d9cad9e7687c59c64cb86c3af71f54a5d4e Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Thu, 27 Jun 2024 13:24:57 -0400 Subject: [PATCH 022/216] Deprecate text_expansion and weighted_tokens queries (#109880) --- docs/changelog/109880.yaml | 10 + .../query-dsl/text-expansion-query.asciidoc | 32 ++- .../query-dsl/weighted-tokens-query.asciidoc | 18 +- docs/reference/search/rrf.asciidoc | 170 ++++++--------- .../retrievers-overview.asciidoc | 167 +++++++-------- .../search-application-api.asciidoc | 28 ++- .../semantic-search-elser.asciidoc | 202 +++++++----------- .../semantic-search/hybrid-search.asciidoc | 11 +- .../semantic-search/search.asciidoc | 32 ++- .../ml/search/WeightedTokensQueryBuilder.java | 14 ++ .../WeightedTokensQueryBuilderTests.java | 24 +++ .../entsearch/rules/40_rule_query_search.yml | 24 +-- .../inference/30_semantic_text_inference.yml | 124 +++++------ .../test/multi_cluster/40_text_expansion.yml | 70 ++++-- .../test/remote_cluster/40_text_expansion.yml | 70 ++++-- .../ml/integration/TextExpansionQueryIT.java | 5 + .../ml/queries/TextExpansionQueryBuilder.java | 12 ++ .../TextExpansionQueryBuilderTests.java | 24 +++ .../test/ml/text_expansion_search.yml | 74 +++++-- .../text_expansion_search_rank_features.yml | 5 + .../text_expansion_search_sparse_vector.yml | 5 + 21 files 
changed, 612 insertions(+), 509 deletions(-) create mode 100644 docs/changelog/109880.yaml diff --git a/docs/changelog/109880.yaml b/docs/changelog/109880.yaml new file mode 100644 index 0000000000000..71c7209824a8a --- /dev/null +++ b/docs/changelog/109880.yaml @@ -0,0 +1,10 @@ +pr: 109880 +summary: Deprecate `text_expansion` and `weighted_tokens` queries +area: Machine Learning +type: deprecation +issues: [ ] +deprecation: + title: Deprecate `text_expansion` and `weighted_tokens` queries + area: REST API + details: The `text_expansion` and `weighted_tokens` queries have been replaced by `sparse_vector`. + impact: Please update your existing `text_expansion` and `weighted_tokens` queries to use `sparse_vector.` diff --git a/docs/reference/query-dsl/text-expansion-query.asciidoc b/docs/reference/query-dsl/text-expansion-query.asciidoc index 5b3f98b5e1ea8..1c51429b5aa22 100644 --- a/docs/reference/query-dsl/text-expansion-query.asciidoc +++ b/docs/reference/query-dsl/text-expansion-query.asciidoc @@ -1,18 +1,19 @@ [[query-dsl-text-expansion-query]] == Text expansion query + ++++ Text expansion ++++ -The text expansion query uses a {nlp} model to convert the query text into a -list of token-weight pairs which are then used in a query against a +deprecated[8.15.0, This query has been replaced by <>.] + +The text expansion query uses a {nlp} model to convert the query text into a list of token-weight pairs which are then used in a query against a <> or <> field. [discrete] [[text-expansion-query-ex-request]] === Example request - [source,console] ---- GET _search @@ -34,28 +35,24 @@ GET _search === Top level parameters for `text_expansion` ``::: -(Required, object) -The name of the field that contains the token-weight pairs the NLP model created -based on the input text. +(Required, object) The name of the field that contains the token-weight pairs the NLP model created based on the input text. [discrete] [[text-expansion-rank-feature-field-params]] === Top level parameters for `` `model_id`:::: -(Required, string) -The ID of the model to use to convert the query text into token-weight pairs. It -must be the same model ID that was used to create the tokens from the input -text. +(Required, string) The ID of the model to use to convert the query text into token-weight pairs. +It must be the same model ID that was used to create the tokens from the input text. `model_text`:::: -(Required, string) -The query text you want to use for search. +(Required, string) The query text you want to use for search. `pruning_config` :::: (Optional, object) preview:[] -Optional pruning configuration. If enabled, this will omit non-significant tokens from the query in order to improve query performance. +Optional pruning configuration. +If enabled, this will omit non-significant tokens from the query in order to improve query performance. Default: Disabled. + -- @@ -89,10 +86,8 @@ NOTE: The default values for `tokens_freq_ratio_threshold` and `tokens_weight_th [[text-expansion-query-example]] === Example ELSER query -The following is an example of the `text_expansion` query that references the -ELSER model to perform semantic search. For a more detailed description of how -to perform semantic search by using ELSER and the `text_expansion` query, refer -to <>. +The following is an example of the `text_expansion` query that references the ELSER model to perform semantic search. +For a more detailed description of how to perform semantic search by using ELSER and the `text_expansion` query, refer to <>. 
[source,console] ---- @@ -155,8 +150,7 @@ GET my-index/_search ---- // TEST[skip: TBD] -This can also be achieved using <>, -through an <> with multiple +This can also be achieved using <>, through an <> with multiple <>. [source,console] diff --git a/docs/reference/query-dsl/weighted-tokens-query.asciidoc b/docs/reference/query-dsl/weighted-tokens-query.asciidoc index cbd88eb3290dc..d4318665a9778 100644 --- a/docs/reference/query-dsl/weighted-tokens-query.asciidoc +++ b/docs/reference/query-dsl/weighted-tokens-query.asciidoc @@ -1,9 +1,11 @@ [[query-dsl-weighted-tokens-query]] === Weighted tokens query + ++++ Weighted tokens ++++ +deprecated[8.15.0, This query has been replaced by the <> and will be removed in an upcoming release.] preview::[] The weighted tokens query requires a list of token-weight pairs that are sent in with a query rather than calculated using a {nlp} model. @@ -40,32 +42,28 @@ POST _search === Top level parameters for `weighted_token` ``::: -(Required, dictionary) -A dictionary of token-weight pairs. +(Required, dictionary) A dictionary of token-weight pairs. `pruning_config` :::: -(Optional, object) -Optional pruning configuration. If enabled, this will omit non-significant tokens from the query in order to improve query performance. +(Optional, object) Optional pruning configuration. +If enabled, this will omit non-significant tokens from the query in order to improve query performance. Default: Disabled. + -- Parameters for `` are: `tokens_freq_ratio_threshold`:: -(Optional, integer) -Tokens whose frequency is more than `tokens_freq_ratio_threshold` times the average frequency of all tokens in the specified field are considered outliers and pruned. +(Optional, integer) Tokens whose frequency is more than `tokens_freq_ratio_threshold` times the average frequency of all tokens in the specified field are considered outliers and pruned. This value must be between 1 and 100. Default: `5`. `tokens_weight_threshold`:: -(Optional, float) -Tokens whose weight is less than `tokens_weight_threshold` are considered nonsignificant and pruned. +(Optional, float) Tokens whose weight is less than `tokens_weight_threshold` are considered nonsignificant and pruned. This value must be between 0 and 1. Default: `0.4`. `only_score_pruned_tokens`:: -(Optional, boolean) -If `true` we only input pruned tokens into scoring, and discard non-pruned tokens. +(Optional, boolean) If `true` we only input pruned tokens into scoring, and discard non-pruned tokens. It is strongly recommended to set this to `false` for the main query, but this can be set to `true` for a rescore query to get more relevant results. Default: `false`. diff --git a/docs/reference/search/rrf.asciidoc b/docs/reference/search/rrf.asciidoc index ba0f6c018b0eb..fb474fe6bf4e6 100644 --- a/docs/reference/search/rrf.asciidoc +++ b/docs/reference/search/rrf.asciidoc @@ -1,13 +1,13 @@ [[rrf]] === Reciprocal rank fusion -preview::["This functionality is in technical preview and may be changed or removed in a future release. The syntax will likely change before GA. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] +preview::["This functionality is in technical preview and may be changed or removed in a future release. +The syntax will likely change before GA.
+Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] https://plg.uwaterloo.ca/~gvcormac/cormacksigir09-rrf.pdf[Reciprocal rank fusion (RRF)] -is a method for combining multiple result sets with different relevance -indicators into a single result set. RRF requires no tuning, and the different -relevance indicators do not have to be related to each other to achieve high-quality -results. +is a method for combining multiple result sets with different relevance indicators into a single result set. +RRF requires no tuning, and the different relevance indicators do not have to be related to each other to achieve high-quality results. RRF uses the following formula to determine the score for ranking each document: @@ -31,15 +31,13 @@ return score [[rrf-api]] ==== Reciprocal rank fusion API -You can use RRF as part of a <> to combine and rank -documents using separate sets of top documents (result sets) from a -combination of <> using an -<>. A minimum of *two* child retrievers is -required for ranking. +You can use RRF as part of a <> to combine and rank documents using separate sets of top documents (result sets) from a combination of <> using an +<>. +A minimum of *two* child retrievers is required for ranking. An RRF retriever is an optional object defined as part of a search request's -<>. The RRF retriever object contains -the following parameters: +<>. +The RRF retriever object contains the following parameters: include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=rrf-retrievers] @@ -82,21 +80,15 @@ GET example-index/_search ---- // TEST[skip:example fragment] -In the above example, we execute the `knn` and `standard` retrievers -independently of each other. Then we use the `rrf` retriever to combine -the results. +In the above example, we execute the `knn` and `standard` retrievers independently of each other. +Then we use the `rrf` retriever to combine the results. -<1> First, we execute the kNN search specified by the `knn` retriever to -get its global top 50 results. -<2> Second, we execute the query specified by the `standard` retriever to get -its global top 50 results. -<3> Then, on a coordinating node, we combine the kNN search top documents with -the query top documents and rank them based on the RRF formula using parameters from -the `rrf` retriever to get the combined top documents using the default `size` of `10`. +<1> First, we execute the kNN search specified by the `knn` retriever to get its global top 50 results. +<2> Second, we execute the query specified by the `standard` retriever to get its global top 50 results. +<3> Then, on a coordinating node, we combine the kNN search top documents with the query top documents and rank them based on the RRF formula using parameters from the `rrf` retriever to get the combined top documents using the default `size` of `10`. -Note that if `k` from a knn search is larger than `rank_window_size`, the results are -truncated to `rank_window_size`. If `k` is smaller than `rank_window_size`, the results are -`k` size. +Note that if `k` from a knn search is larger than `rank_window_size`, the results are truncated to `rank_window_size`. +If `k` is smaller than `rank_window_size`, the results are `k` size. [[rrf-supported-features]] ==== Reciprocal rank fusion supported features @@ -117,16 +109,13 @@ The `rrf` retriever does not currently support: * <> * <> -Using unsupported features as part of a search with an `rrf` retriever results -in an exception. 
+Using unsupported features as part of a search with an `rrf` retriever results in an exception. [[rrf-using-multiple-standard-retrievers]] ==== Reciprocal rank fusion using multiple standard retrievers -The `rrf` retriever provides a way to combine and rank multiple -`standard` retrievers. A primary use case is combining top documents -from a traditional BM25 query and an <> -query to achieve improved relevance. +The `rrf` retriever provides a way to combine and rank multiple `standard` retrievers. +A primary use case is combining top documents from a traditional BM25 query and an <> query to achieve improved relevance. An example request using RRF with multiple standard retrievers: @@ -149,11 +138,10 @@ GET example-index/_search { "standard": { <2> "query": { - "text_expansion":{ - "ml.tokens":{ - "model_id":"my_elser_model", - "model_text":"What blue shoes are on sale?" - } + "sparse_vector":{ + "field": "ml.tokens", + "inference_id": "my_elser_model", + "query": "What blue shoes are on sale?" } } } @@ -167,38 +155,28 @@ GET example-index/_search ---- // TEST[skip:example fragment] -In the above example, we execute each of the two `standard` retrievers -independently of each other. Then we use the `rrf` retriever to combine -the results. +In the above example, we execute each of the two `standard` retrievers independently of each other. +Then we use the `rrf` retriever to combine the results. -<1> First we run the `standard` retriever -specifying a term query for `blue shoes sales` using the standard BM25 -scoring algorithm. -<2> Next we run the `standard` retriever specifying a -text expansion query for `What blue shoes are on sale?` +<1> First we run the `standard` retriever specifying a term query for `blue shoes sales` using the standard BM25 scoring algorithm. +<2> Next we run the `standard` retriever specifying a sparse_vector query for `What blue shoes are on sale?` using our <> scoring algorithm. -<3> The `rrf` retriever allows us to combine the two top documents sets -generated by completely independent scoring algorithms with equal weighting. +<3> The `rrf` retriever allows us to combine the two top documents sets generated by completely independent scoring algorithms with equal weighting. -Not only does this remove the need to figure out what the appropriate -weighting is using linear combination, but RRF is also shown to give improved -relevance over either query individually. +Not only does this remove the need to figure out what the appropriate weighting is using linear combination, but RRF is also shown to give improved relevance over either query individually. [[rrf-using-sub-searches]] ==== Reciprocal rank fusion using sub searches -RRF using sub searches is no longer supported. Use the -<> instead. See -<> -for an example. +RRF using sub searches is no longer supported. +Use the <> instead. +See <> for an example. [[rrf-full-example]] ==== Reciprocal rank fusion full example -We begin by creating a mapping for an index with a text field, a vector field, -and an integer field along with indexing several documents. For this example we -are going to use a vector with only a single dimension to make the ranking easier -to explain. +We begin by creating a mapping for an index with a text field, a vector field, and an integer field along with indexing several documents. +For this example we are going to use a vector with only a single dimension to make the ranking easier to explain. 
[source,console] ---- @@ -262,9 +240,7 @@ POST example-index/_refresh ---- // TEST -We now execute a search using an `rrf` retriever with a `standard` retriever -specifying a BM25 query, a `knn` retriever specifying a kNN search, and -a terms aggregation. +We now execute a search using an `rrf` retriever with a `standard` retriever specifying a BM25 query, a `knn` retriever specifying a kNN search, and a terms aggregation. [source,console] ---- @@ -307,9 +283,8 @@ GET example-index/_search ---- // TEST[continued] -And we receive the response with ranked `hits` and the terms -aggregation result. We have both the ranker's `score` -and the `_rank` option to show our top-ranked documents. +And we receive the response with ranked `hits` and the terms aggregation result. +We have both the ranker's `score` and the `_rank` option to show our top-ranked documents. [source,console-response] ---- @@ -387,10 +362,8 @@ and the `_rank` option to show our top-ranked documents. ---- // TESTRESPONSE[s/: \.\.\./: $body.$_path/] -Let's break down how these hits were ranked. We -start by running the `standard` retriever specifying a query -and the `knn` retriever specifying a kNN search separately to -collect what their individual hits are. +Let's break down how these hits were ranked. +We start by running the `standard` retriever specifying a query and the `knn` retriever specifying a kNN search separately to collect what their individual hits are. First, we look at the hits for the query from the `standard` retriever. @@ -444,8 +417,8 @@ First, we look at the hits for the query from the `standard` retriever. <3> rank 3, `_id` 2 <4> rank 4, `_id` 1 -Note that our first hit doesn't have a value for the `vector` field. Now, -we look at the results for the kNN search from the `knn` retriever. +Note that our first hit doesn't have a value for the `vector` field. +Now, we look at the results for the kNN search from the `knn` retriever. [source,console-result] ---- @@ -497,9 +470,7 @@ we look at the results for the kNN search from the `knn` retriever. <3> rank 3, `_id` 1 <4> rank 4, `_id` 5 -We can now take the two individually ranked result sets and apply the -RRF formula to them using parameters from the `rrf` retriever to get -our final ranking. +We can now take the two individually ranked result sets and apply the RRF formula to them using parameters from the `rrf` retriever to get our final ranking. [source,python] ---- @@ -512,17 +483,14 @@ _id: 5 = 1.0/(1+4) = 0.2000 ---- // NOTCONSOLE -We rank the documents based on the RRF formula with a `rank_window_size` of `5` -truncating the bottom `2` docs in our RRF result set with a `size` of `3`. -We end with `_id: 3` as `_rank: 1`, `_id: 2` as `_rank: 2`, and -`_id: 4` as `_rank: 3`. This ranking matches the result set from the -original RRF search as expected. +We rank the documents based on the RRF formula with a `rank_window_size` of `5` truncating the bottom `2` docs in our RRF result set with a `size` of `3`. +We end with `_id: 3` as `_rank: 1`, `_id: 2` as `_rank: 2`, and `_id: 4` as `_rank: 3`. +This ranking matches the result set from the original RRF search as expected. ==== Explain in RRF -In addition to individual query scoring details, we can make use of the `explain=true` parameter to get information on how -the RRF scores for each document were computed. 
Working with the example above, and by adding -`explain=true` to the search request, we'd now have a response that looks like the following: +In addition to individual query scoring details, we can make use of the `explain=true` parameter to get information on how the RRF scores for each document were computed. +Working with the example above, and by adding `explain=true` to the search request, we'd now have a response that looks like the following: [source,js] ---- @@ -585,8 +553,8 @@ the RRF scores for each document were computed. Working with the example above, <6> the `value` heres specifies the `rank` of this document for the second (`knn`) query In addition to the above, explain in RRF also supports <> using the `_name` parameter. -Using named queries allows for easier and more intuitive understanding of the RRF score computation, -especially when dealing with multiple queries. So, we would now have: +Using named queries allows for easier and more intuitive understanding of the RRF score computation, especially when dealing with multiple queries. +So, we would now have: [source,js] ---- @@ -632,6 +600,7 @@ GET example-index/_search <1> Here we specify a `_name` for the `knn` retriever The response would now include the named query in the explanation: + [source,js] ---- { @@ -677,25 +646,19 @@ The response would now include the named query in the explanation: ==== Pagination in RRF When using `rrf` you can paginate through the results using the `from` parameter. -As the final ranking is solely dependent on the original query ranks, to ensure -consistency when paginating, we have to make sure that while `from` changes, the order -of what we have already seen remains intact. To that end, we're using a fixed `rank_window_size` -as the whole available result set upon which we can paginate. +As the final ranking is solely dependent on the original query ranks, to ensure consistency when paginating, we have to make sure that while `from` changes, the order of what we have already seen remains intact. +To that end, we're using a fixed `rank_window_size` as the whole available result set upon which we can paginate. This essentially means that if: -* `from + size` ≤ `rank_window_size` : we could get `results[from: from+size]` documents back from -the final `rrf` ranked result set +* `from + size` ≤ `rank_window_size` : we could get `results[from: from+size]` documents back from the final `rrf` ranked result set + +* `from + size` > `rank_window_size` : we would get 0 results back, as the request would fall outside the available `rank_window_size`-sized result set. -* `from + size` > `rank_window_size` : we would get 0 results back, as the request would fall outside the -available `rank_window_size`-sized result set. +An important thing to note here is that since `rank_window_size` is all the results that we'll get to see from the individual query components, pagination guarantees consistency, i.e. no documents are skipped or duplicated in multiple pages, iff `rank_window_size` remains the same. +If `rank_window_size` changes, then the order of the results might change as well, even for the same ranks. -An important thing to note here is that since `rank_window_size` is all the results that we'll get to see -from the individual query components, pagination guarantees consistency, i.e. no documents are skipped -or duplicated in multiple pages, iff `rank_window_size` remains the same. If `rank_window_size` changes, then the order -of the results might change as well, even for the same ranks. 
+To illustrate all of the above, let's consider the following simplified example where we have two queries, `queryA` and `queryB` and their ranked documents: + [source,python] ---- | queryA | queryB | _id: | 1 | 5 | _id: | 2 | 4 | _id: | 3 | 3 | _id: | 4 | 1 | _id: | | 2 | ---- For `rank_window_size=5` we would get to see all documents from both `queryA` and `queryB`. Assuming a `rank_constant=1`, the `rrf` scores would be: + [source,python] ---- # doc | queryA | queryB | score _id: 1 = 1.0/(1+1) + 1.0/(1+4) = 0.7 _id: 2 = 1.0/(1+2) + 1.0/(1+5) = 0.5 _id: 3 = 1.0/(1+3) + 1.0/(1+3) = 0.5 _id: 4 = 1.0/(1+4) + 1.0/(1+2) = 0.5333 _id: 5 = 0 + 1.0/(1+1) = 0.5 ---- // NOTCONSOLE -So the final ranked result set would be [`1`, `4`, `2`, `3`, `5`] and we would paginate over that, since -`rank_window_size == len(results)`. In this scenario, we would have: +So the final ranked result set would be [`1`, `4`, `2`, `3`, `5`] and we would paginate over that, since `rank_window_size == len(results)`. +In this scenario, we would have: * `from=0, size=2` would return documents [`1`, `4`] with ranks `[1, 2]` * `from=2, size=2` would return documents [`2`, `3`] with ranks `[3, 4]` * `from=4, size=2` would return document [`5`] with rank `[5]` * `from=6, size=2` would return an empty result set as there are no more results to iterate over -Now, if we had a `rank_window_size=2`, we would only get to see `[1, 2]` and `[5, 4]` documents -for queries `queryA` and `queryB` respectively. Working out the math, we would see that the results would now -be slightly different, because we would have no knowledge of the documents in positions `[3: end]` for either query. +Now, if we had a `rank_window_size=2`, we would only get to see `[1, 2]` and `[5, 4]` documents for queries `queryA` and `queryB` respectively. +Working out the math, we would see that the results would now be slightly different, because we would have no knowledge of the documents in positions `[3: end]` for either query. + [source,python] ---- # doc | queryA | queryB | score _id: 1 = 1.0/(1+1) + 0 = 0.5 _id: 2 = 1.0/(1+2) + 0 = 0.33 _id: 4 = 0 + 1.0/(1+2) = 0.33 _id: 5 = 0 + 1.0/(1+1) = 0.5 ---- // NOTCONSOLE -The final ranked result set would be [`1`, `5`, `2`, `4`], and we would be able to paginate -on the top `rank_window_size` results, i.e. [`1`, `5`]. So for the same params as above, we would now have: +The final ranked result set would be [`1`, `5`, `2`, `4`], and we would be able to paginate on the top `rank_window_size` results, i.e. [`1`, `5`]. +So for the same params as above, we would now have: * `from=0, size=2` would return [`1`, `5`] with ranks `[1, 2]` * `from=2, size=2` would return an empty result set as it would fall outside the available `rank_window_size` results. diff --git a/docs/reference/search/search-your-data/retrievers-overview.asciidoc b/docs/reference/search/search-your-data/retrievers-overview.asciidoc index fdd984819558b..92cd085583916 100644 --- a/docs/reference/search/search-your-data/retrievers-overview.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-overview.asciidoc @@ -1,46 +1,39 @@ [[retrievers-overview]] -== Retrievers
This simplifies your search -application logic, because you no longer need to configure complex searches via -multiple {es} calls or implement additional client-side logic to -combine results from different queries. +This abstraction enables the configuration of multi-stage retrieval pipelines within a single `_search` call. +This simplifies your search application logic, because you no longer need to configure complex searches via multiple {es} calls or implement additional client-side logic to combine results from different queries. -This document provides a general overview of the retriever abstraction. -For implementation details, including notable restrictions, check out the -<> in the `_search` API docs. +This document provides a general overview of the retriever abstraction. +For implementation details, including notable restrictions, check out the +<> in the `_search` API docs. [discrete] [[retrievers-overview-types]] -=== Retriever types +=== Retriever types Retrievers come in various types, each tailored for different search operations. -The following retrievers are currently available: - -* <>. Returns top documents from a -traditional https://www.elastic.co/guide/en/elasticsearch/reference/master/query-dsl.html[query]. -Mimics a traditional query but in the context of a retriever framework. This -ensures backward compatibility as existing `_search` requests remain supported. -That way you can transition to the new abstraction at your own pace without -mixing syntaxes. -* <>. Returns top documents from a <>, -in the context of a retriever framework. -* <>. Combines and ranks multiple first-stage retrievers using -the reciprocal rank fusion (RRF) algorithm. Allows you to combine multiple result sets -with different relevance indicators into a single result set. -An RRF retriever is a *compound retriever*, where its `filter` element is -propagated to its sub retrievers. +The following retrievers are currently available: + +* <>. +Returns top documents from a traditional https://www.elastic.co/guide/en/elasticsearch/reference/master/query-dsl.html[query]. +Mimics a traditional query but in the context of a retriever framework. +This ensures backward compatibility as existing `_search` requests remain supported. +That way you can transition to the new abstraction at your own pace without mixing syntaxes. +* <>. +Returns top documents from a <>, in the context of a retriever framework. +* <>. +Combines and ranks multiple first-stage retrievers using the reciprocal rank fusion (RRF) algorithm. +Allows you to combine multiple result sets with different relevance indicators into a single result set. +An RRF retriever is a *compound retriever*, where its `filter` element is propagated to its sub retrievers. + -Sub retrievers may not use elements that -are restricted by having a compound retriever as part of the retriever tree. -See the <> for detailed -examples and information on how to use the RRF retriever. +Sub retrievers may not use elements that are restricted by having a compound retriever as part of the retriever tree. +See the <> for detailed examples and information on how to use the RRF retriever. [NOTE] ==== @@ -48,54 +41,41 @@ Stay tuned for more retriever types in future releases! ==== [discrete] -=== What makes retrievers useful? - -Here's an overview of what makes retrievers useful and how they differ from -regular queries. - -. *Simplified user experience*. 
Retrievers simplify the user experience by -allowing entire retrieval pipelines to be configured in a single API call. This -maintains backward compatibility with traditional query elements by -automatically translating them to the appropriate retriever. -. *Structured retrieval*. Retrievers provide a more structured way to define search -operations. They allow searches to be described using a "retriever tree", a -hierarchical structure that clarifies the sequence and logic of operations, -making complex searches more understandable and manageable. -. *Composability and flexibility*. Retrievers enable flexible composability, -allowing you to build pipelines and seamlessly integrate different retrieval -strategies into these pipelines. Retrievers make it easy to test out different -retrieval strategy combinations. -. *Compound operations*. A retriever can have sub retrievers. This -allows complex nested searches where the results of one retriever feed into -another, supporting sophisticated querying strategies that might involve -multiple stages or criteria. -. *Retrieval as a first-class concept*. Unlike -traditional queries, where the query is a part of a larger search API call, -retrievers are designed as standalone entities that can be combined or used in -isolation. This enables a more modular and flexible approach to constructing -searches. -. *Enhanced control over document scoring and ranking*. Retrievers -allow for more explicit control over how documents are scored and filtered. For -instance, you can specify minimum score thresholds, apply complex filters -without affecting scoring, and use parameters like `terminate_after` for -performance optimizations. -. *Integration with existing {es} functionalities*. Even though -retrievers can be used instead of existing `_search` API syntax (like the -`query` and `knn`), they are designed to integrate seamlessly with things like -pagination (`search_after`) and sorting. They also maintain compatibility with -aggregation operations by treating the combination of all leaf retrievers as +=== What makes retrievers useful? + +Here's an overview of what makes retrievers useful and how they differ from regular queries. + +. *Simplified user experience*. +Retrievers simplify the user experience by allowing entire retrieval pipelines to be configured in a single API call. +This maintains backward compatibility with traditional query elements by automatically translating them to the appropriate retriever. +. *Structured retrieval*. +Retrievers provide a more structured way to define search operations. +They allow searches to be described using a "retriever tree", a hierarchical structure that clarifies the sequence and logic of operations, making complex searches more understandable and manageable. +. *Composability and flexibility*. +Retrievers enable flexible composability, allowing you to build pipelines and seamlessly integrate different retrieval strategies into these pipelines. +Retrievers make it easy to test out different retrieval strategy combinations. +. *Compound operations*. +A retriever can have sub retrievers. +This allows complex nested searches where the results of one retriever feed into another, supporting sophisticated querying strategies that might involve multiple stages or criteria. +. *Retrieval as a first-class concept*. +Unlike traditional queries, where the query is a part of a larger search API call, retrievers are designed as standalone entities that can be combined or used in isolation. 
+This enables a more modular and flexible approach to constructing searches.
+. *Enhanced control over document scoring and ranking*.
+Retrievers allow for more explicit control over how documents are scored and filtered.
+For instance, you can specify minimum score thresholds, apply complex filters without affecting scoring, and use parameters like `terminate_after` for performance optimizations.
+. *Integration with existing {es} functionalities*.
+Even though retrievers can be used instead of existing `_search` API syntax (like the
+`query` and `knn`), they are designed to integrate seamlessly with things like pagination (`search_after`) and sorting.
+They also maintain compatibility with aggregation operations by treating the combination of all leaf retrievers as
 `should` clauses in a boolean query.
-. *Cleaner separation of concerns*. When using compound retrievers, only the
-query element is allowed, which enforces a cleaner separation of concerns
-and prevents the complexity that might arise from overly nested or
-interdependent configurations.
+. *Cleaner separation of concerns*.
+When using compound retrievers, only the query element is allowed, which enforces a cleaner separation of concerns and prevents the complexity that might arise from overly nested or interdependent configurations.
 
 [discrete]
 [[retrievers-overview-example]]
 === Example
 
-The following example demonstrates how using retrievers
-simplify the composability of queries for RRF ranking.
+The following example demonstrates how using retrievers simplifies the composability of queries for RRF ranking.
 
 [source,js]
 ----
@@ -107,11 +87,10 @@ GET example-index/_search
         {
           "standard": {
             "query": {
-              "text_expansion": {
-                "vector.tokens": {
-                  "model_id": ".elser_model_2",
-                  "model_text": "What blue shoes are on sale?"
-                }
+              "sparse_vector": {
+                "field": "vector.tokens",
+                "inference_id": "my-elser-endpoint",
+                "query": "What blue shoes are on sale?"
               }
             }
           }
@@ -132,8 +111,7 @@ GET example-index/_search
 ----
 //NOTCONSOLE
 
-This example demonstrates how you can combine different
-retrieval strategies into a single `retriever` pipeline.
+This example demonstrates how you can combine different retrieval strategies into a single `retriever` pipeline.
 
 Compare to `RRF` with `sub_searches` approach:
 
@@ -155,14 +133,13 @@ GET example-index/_search
       },
       {
         "query":{
-          "text_expansion":{
-            "vector.tokens":{
-              "model_id":".elser_model_2",
-              "model_text":"What blue shoes are on sale?"
+          "sparse_vector": {
+            "field": "vector.tokens",
+            "inference_id": "my-elser-endpoint",
+            "query": "What blue shoes are on sale?"
            }
          }
        }
-      }
     ],
     "rank":{
        "rrf":{
@@ -179,16 +156,18 @@ GET example-index/_search
 [[retrievers-overview-glossary]]
 === Glossary
 
-Here are some important terms:
-
-* *Retrieval Pipeline*. Defines the entire retrieval and ranking logic to
-produce top hits.
-* *Retriever Tree*. A hierarchical structure that defines how retrievers interact.
-* *First-stage Retriever*. Returns an initial set of candidate documents.
-* *Compound Retriever*. Builds on one or more retrievers,
-enhancing document retrieval and ranking logic.
-* *Combiners*. Compound retrievers that merge top hits
-from multiple sub-retrievers.
+Here are some important terms:
+
+* *Retrieval Pipeline*.
+Defines the entire retrieval and ranking logic to produce top hits.
+* *Retriever Tree*.
+A hierarchical structure that defines how retrievers interact.
+* *First-stage Retriever*.
+Returns an initial set of candidate documents.
+* *Compound Retriever*.
+Builds on one or more retrievers, enhancing document retrieval and ranking logic.
+* *Combiners*.
+Compound retrievers that merge top hits from multiple sub-retrievers.
 
 //* NOT YET *Rerankers*. Special compound retrievers that reorder hits and may adjust the number of hits, with distinctions between first-stage and second-stage rerankers.
 
 [discrete]
diff --git a/docs/reference/search/search-your-data/search-application-api.asciidoc b/docs/reference/search/search-your-data/search-application-api.asciidoc
index 7c9308e78ebea..2fe28faed546f 100644
--- a/docs/reference/search/search-your-data/search-application-api.asciidoc
+++ b/docs/reference/search/search-your-data/search-application-api.asciidoc
@@ -417,11 +417,10 @@ PUT _application/search_application/my-search-app
         {
           "standard": {
             "query": {
-              "text_expansion": {
-                "ml.inference.{{.}}_expanded.predicted_value": {
-                  "model_text": "{{query_string}}",
-                  "model_id": ""
-                }
+              "sparse_vector": {
+                "field": "ml.inference.{{.}}_expanded.predicted_value",
+                "inference_id": "",
+                "query": "{{query_string}}"
               }
             }
           }
@@ -509,12 +508,10 @@ PUT _application/search_application/my_search_application
         {{#elser}}
         {{#elser_fields}}
         {
-          "text_expansion": {
-            "ml.inference.{{name}}_expanded.predicted_value": {
-              "model_text": "{{query_string}}",
-              "model_id": ".elser_model_1",
-              "boost": "{{boost}}"
-            }
+          "sparse_vector": {
+            "field": "ml.inference.{{.}}_expanded.predicted_value",
+            "inference_id": "",
+            "query": "{{query_string}}"
           }
         },
         {{/elser_fields}}
@@ -673,12 +670,11 @@ PUT _application/search_application/my_search_application
             "should": [
               {{#elser_fields}}
               {
-                "text_expansion": {
-                  "ml.inference.{{name}}_expanded.predicted_value": {
-                    "model_text": "{{query_string}}",
-                    "model_id": ""
+                "sparse_vector": {
+                  "field": "ml.inference.{{.}}_expanded.predicted_value",
+                  "inference_id": "",
+                  "query": "{{query_string}}"
                 }
-                }
               },
               {{/elser_fields}}
             ]
diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc
index e7f503a4a6c70..bf700eb7b1ff4 100644
--- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc
+++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc
@@ -1,61 +1,44 @@
 [[semantic-search-elser]]
 === Tutorial: semantic search with ELSER
+
 ++++
 Semantic search with ELSER
 ++++
 
-Elastic Learned Sparse EncodeR - or ELSER - is an NLP model trained by Elastic
-that enables you to perform semantic search by using sparse vector
-representation. Instead of literal matching on search terms, semantic search
-retrieves results based on the intent and the contextual meaning of a search
-query.
+Elastic Learned Sparse EncodeR - or ELSER - is an NLP model trained by Elastic that enables you to perform semantic search by using sparse vector representation.
+Instead of literal matching on search terms, semantic search retrieves results based on the intent and the contextual meaning of a search query.
 
-The instructions in this tutorial shows you how to use ELSER to perform semantic
-search on your data.
+The instructions in this tutorial show you how to use ELSER to perform semantic search on your data.
 
 IMPORTANT: For the easiest way to perform semantic search in the {stack}, refer to the <> end-to-end tutorial.
 
-NOTE: Only the first 512 extracted tokens per field are considered during
-semantic search with ELSER. Refer to
-{ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[this page] for more
-information.
- +NOTE: Only the first 512 extracted tokens per field are considered during semantic search with ELSER. +Refer to {ml-docs}/ml-nlp-limitations.html#ml-nlp-elser-v1-limit-512[this page] for more information. [discrete] [[requirements]] ==== Requirements -To perform semantic search by using ELSER, you must have the NLP model deployed -in your cluster. Refer to the -{ml-docs}/ml-nlp-elser.html[ELSER documentation] to learn how to download and -deploy the model. - -NOTE: The minimum dedicated ML node size for deploying and using the ELSER model -is 4 GB in Elasticsearch Service if -{cloud}/ec-autoscaling.html[deployment autoscaling] is turned off. Turning on -autoscaling is recommended because it allows your deployment to dynamically -adjust resources based on demand. Better performance can be achieved by using -more allocations or more threads per allocation, which requires bigger ML nodes. -Autoscaling provides bigger nodes when required. If autoscaling is turned off, -you must provide suitably sized nodes yourself. +To perform semantic search by using ELSER, you must have the NLP model deployed in your cluster. +Refer to the {ml-docs}/ml-nlp-elser.html[ELSER documentation] to learn how to download and deploy the model. +NOTE: The minimum dedicated ML node size for deploying and using the ELSER model is 4 GB in Elasticsearch Service if +{cloud}/ec-autoscaling.html[deployment autoscaling] is turned off. +Turning on autoscaling is recommended because it allows your deployment to dynamically adjust resources based on demand. +Better performance can be achieved by using more allocations or more threads per allocation, which requires bigger ML nodes. +Autoscaling provides bigger nodes when required. +If autoscaling is turned off, you must provide suitably sized nodes yourself. [discrete] [[elser-mappings]] ==== Create the index mapping -First, the mapping of the destination index - the index that contains the tokens -that the model created based on your text - must be created. The destination -index must have a field with the -<> or <> field -type to index the ELSER output. +First, the mapping of the destination index - the index that contains the tokens that the model created based on your text - must be created. +The destination index must have a field with the <> or <> field type to index the ELSER output. -NOTE: ELSER output must be ingested into a field with the `sparse_vector` or -`rank_features` field type. Otherwise, {es} interprets the token-weight pairs as -a massive amount of fields in a document. If you get an error similar to this -`"Limit of total fields [1000] has been exceeded while adding new fields"` then -the ELSER output field is not mapped properly and it has a field type different -than `sparse_vector` or `rank_features`. +NOTE: ELSER output must be ingested into a field with the `sparse_vector` or `rank_features` field type. +Otherwise, {es} interprets the token-weight pairs as a massive amount of fields in a document. +If you get an error similar to this: `"Limit of total fields [1000] has been exceeded while adding new fields"` then the ELSER output field is not mapped properly and it has a field type different than `sparse_vector` or `rank_features`. [source,console] ---- @@ -74,24 +57,22 @@ PUT my-index } ---- // TEST[skip:TBD] -<1> The name of the field to contain the generated tokens. It must be refrenced -in the {infer} pipeline configuration in the next step. +<1> The name of the field to contain the generated tokens. 
+It must be referenced in the {infer} pipeline configuration in the next step. <2> The field to contain the tokens is a `sparse_vector` field. <3> The name of the field from which to create the sparse vector representation. -In this example, the name of the field is `content`. It must be referenced in the -{infer} pipeline configuration in the next step. +In this example, the name of the field is `content`. +It must be referenced in the {infer} pipeline configuration in the next step. <4> The field type which is text in this example. To learn how to optimize space, refer to the <> section. - [discrete] [[inference-ingest-pipeline]] ==== Create an ingest pipeline with an inference processor Create an <> with an -<> to use ELSER to infer against the data -that is being ingested in the pipeline. +<> to use ELSER to infer against the data that is being ingested in the pipeline. [source,console] ---- @@ -112,8 +93,8 @@ PUT _ingest/pipeline/elser-v2-test ] } ---- -<1> Configuration object that defines the `input_field` for the {infer} process -and the `output_field` that will contain the {infer} results. + +<1> Configuration object that defines the `input_field` for the {infer} process and the `output_field` that will contain the {infer} results. //// [source,console] @@ -128,26 +109,23 @@ DELETE _ingest/pipeline/elser-v2-test [[load-data]] ==== Load data -In this step, you load the data that you later use in the {infer} ingest -pipeline to extract tokens from it. +In this step, you load the data that you later use in the {infer} ingest pipeline to extract tokens from it. -Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS -MARCO Passage Ranking data set. It consists of 200 queries, each accompanied by -a list of relevant text passages. All unique passages, along with their IDs, -have been extracted from that data set and compiled into a +Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS MARCO Passage Ranking data set. +It consists of 200 queries, each accompanied by a list of relevant text passages. +All unique passages, along with their IDs, have been extracted from that data set and compiled into a https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file]. -IMOPRTANT: The `msmarco-passagetest2019-top1000` dataset was not utilized to -train the model. It is only used in this tutorial as a sample dataset that is -easily accessible for demonstration purposes. You can use a different data set -to test the workflow and become familiar with it. +IMPORTANT: The `msmarco-passagetest2019-top1000` dataset was not utilized to train the model. +It is only used in this tutorial as a sample dataset that is easily accessible for demonstration purposes. +You can use a different data set to test the workflow and become familiar with it. Download the file and upload it to your cluster using the {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer] -in the {ml-app} UI. Assign the name `id` to the first column and `content` to -the second column. The index name is `test-data`. Once the upload is complete, -you can see an index named `test-data` with 182469 documents. - +in the {ml-app} UI. +Assign the name `id` to the first column and `content` to the second column. +The index name is `test-data`. +Once the upload is complete, you can see an index named `test-data` with 182469 documents. 
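+If you would rather script the upload than use the Data Visualizer, the sketch below shows one way to bulk index the tsv file; it assumes the official {es} Python client, an unsecured cluster reachable at `http://localhost:9200`, and a local copy of the file, none of which are prescribed by this tutorial:
+
+[source,python]
+----
+import csv
+
+from elasticsearch import Elasticsearch, helpers
+
+es = Elasticsearch("http://localhost:9200")  # assumed local test cluster
+
+def actions():
+    # Each tsv row holds a passage id and its text, matching the `id` and
+    # `content` columns described above.
+    with open("msmarco-passagetest2019-unique.tsv", newline="") as tsv:
+        for doc_id, content in csv.reader(tsv, delimiter="\t"):
+            yield {
+                "_index": "test-data",
+                "_id": doc_id,
+                "_source": {"id": doc_id, "content": content},
+            }
+
+helpers.bulk(es, actions())  # bulk index every passage into `test-data`
+----
+// NOTCONSOLE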
 [discrete]
 [[reindexing-data-elser]]
@@ -171,9 +149,7 @@ POST _reindex?wait_for_completion=false
 }
 ----
 // TEST[skip:TBD]
-<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller
-number makes the update of the reindexing process quicker which enables you to
-follow the progress closely and detect errors early.
+<1> The default batch size for reindexing is 1000. Reducing `size` to a smaller number makes the update of the reindexing process quicker, which enables you to follow the progress closely and detect errors early.
 
 The call returns a task ID to monitor the progress:
 
 [source,console]
 ----
 GET _tasks/
 ----
 // TEST[skip:TBD]
 
-You can also open the Trained Models UI, select the Pipelines tab under ELSER to
-follow the progress.
-
+You can also open the Trained Models UI and select the Pipelines tab under ELSER to follow the progress.
 
 [discrete]
 [[text-expansion-query]]
-==== Semantic search by using the `text_expansion` query
+==== Semantic search by using the `sparse_vector` query
 
-To perform semantic search, use the <>,
-and provide the query text and the ELSER model ID. The example below uses the
-query text "How to avoid muscle soreness after running?", the `content_embedding`
-field contains the generated ELSER output:
+To perform semantic search, use the <>, and provide the query text and the inference ID associated with your ELSER model.
+The example below uses the query text "How to avoid muscle soreness after running?"; the `content_embedding` field contains the generated ELSER output:
 
 [source,console]
 ----
 GET my-index/_search
 {
    "query":{
-      "text_expansion":{
-         "content_embedding":{
-            "model_id":".elser_model_2",
-            "model_text":"How to avoid muscle soreness after running?"
-         }
+      "sparse_vector":{
+         "field": "content_embedding",
+         "inference_id": "my-elser-endpoint",
+         "query": "How to avoid muscle soreness after running?"
       }
    }
 }
 ----
 // TEST[skip:TBD]
 
-The result is the top 10 documents that are closest in meaning to your query
-text from the `my-index` index sorted by their relevancy. The result also
-contains the extracted tokens for each of the relevant search results with their
-weights. Tokens are learned associations capturing relevance, they are not
-synonyms. To learn more about what tokens are, refer to
-{ml-docs}/ml-nlp-elser.html#elser-tokens[this page]. It is possible to exclude
-tokens from source, refer to <> to learn more.
+The result is the top 10 documents that are closest in meaning to your query text from the `my-index` index, sorted by their relevancy.
+The result also contains the extracted tokens for each of the relevant search results with their weights.
+Tokens are learned associations capturing relevance; they are not synonyms.
+To learn more about what tokens are, refer to {ml-docs}/ml-nlp-elser.html#elser-tokens[this page].
+It is possible to exclude tokens from source; refer to <> to learn more.
 
 [source,console-result]
 ----
@@ -265,17 +234,15 @@ tokens from source, refer to <> to learn more.
 [[text-expansion-compound-query]]
 ==== Combining semantic search with other queries
 
-You can combine <> with other
-queries in a <>. For example, use a filter clause
-in a <> or a full text query with the same (or different)
-query text as the `text_expansion` query. This enables you to combine the search
-results from both queries.
+You can combine <> with other queries in a <>.
+For example, use a filter clause in a <> or a full text query with the same (or different) query text as the `sparse_vector` query.
+This enables you to combine the search results from both queries.
 
-The search hits from the `text_expansion` query tend to score higher than other
-{es} queries. Those scores can be regularized by increasing or decreasing the
-relevance scores of each query by using the `boost` parameter. Recall on the
-`text_expansion` query can be high where there is a long tail of less relevant
-results. Use the `min_score` parameter to prune those less relevant documents.
+The search hits from the `sparse_vector` query tend to score higher than other
+{es} queries.
+Those scores can be regularized by increasing or decreasing the relevance scores of each query by using the `boost` parameter.
+Recall on the `sparse_vector` query can be high where there is a long tail of less relevant results.
+Use the `min_score` parameter to prune those less relevant documents.
 
 [source,console]
 ----
 GET my-index/_search
 {
   "query": {
     "bool": { <1>
       "should": [
         {
-          "text_expansion": {
-            "content_embedding": {
-              "model_text": "How to avoid muscle soreness after running?",
-              "model_id": ".elser_model_2",
-              "boost": 1 <2>
-            }
+          "sparse_vector": {
+            "field": "content_embedding",
+            "inference_id": "my-elser-endpoint",
+            "query": "How to avoid muscle soreness after running?",
+            "boost": 1 <2>
           }
         },
         {
@@ -306,17 +272,13 @@ GET my-index/_search
 }
 ----
 // TEST[skip:TBD]
-<1> Both the `text_expansion` and the `query_string` queries are in a `should`
-clause of a `bool` query.
-<2> The `boost` value is `1` for the `text_expansion` query which is the default
-value. This means that the relevance score of the results of this query are not
-boosted.
-<3> The `boost` value is `4` for the `query_string` query. The relevance score
-of the results of this query is increased causing them to rank higher in the
-search results.
+<1> Both the `sparse_vector` and the `query_string` queries are in a `should` clause of a `bool` query.
+<2> The `boost` value is `1` for the `sparse_vector` query, which is the default value.
+This means that the relevance score of the results of this query is not boosted.
+<3> The `boost` value is `4` for the `query_string` query.
+The relevance score of the results of this query is increased, causing them to rank higher in the search results.
 <4> Only the results with a score equal to or higher than `10` are displayed.
 
-
 [discrete]
 [[optimization]]
 === Optimizing performance
 
@@ -325,24 +287,18 @@ search results.
 [[save-space]]
 ==== Saving disk space by excluding the ELSER tokens from document source
 
-The tokens generated by ELSER must be indexed for use in the
-<>. However, it is not
-necessary to retain those terms in the document source. You can save disk space
-by using the <> mapping to remove the ELSER
-terms from the document source.
+The tokens generated by ELSER must be indexed for use in the <>.
+However, it is not necessary to retain those terms in the document source.
+You can save disk space by using the <> mapping to remove the ELSER terms from the document source.
 
 WARNING: Reindex uses the document source to populate the destination index.
-**Once the ELSER terms have been excluded from the source, they cannot be**
-**recovered through reindexing.** Excluding the tokens from the source is a
-space-saving optimsation that should only be applied if you are certain that
-reindexing will not be required in the future! It's important to carefully
-consider this trade-off and make sure that excluding the ELSER terms from the
-source aligns with your specific requirements and use case.
Review the -<> and <> sections carefully to learn -more about the possible consequences of excluding the tokens from the `_source`. - -The mapping that excludes `content_embedding` from the `_source` field can be -created by the following API call: +**Once the ELSER terms have been excluded from the source, they cannot be recovered through reindexing.** +Excluding the tokens from the source is a space-saving optimization that should only be applied if you are certain that reindexing will not be required in the future! +It's important to carefully consider this trade-off and make sure that excluding the ELSER terms from the source aligns with your specific requirements and use case. +Review the +<> and <> sections carefully to learn more about the possible consequences of excluding the tokens from the `_source`. + +The mapping that excludes `content_embedding` from the `_source` field can be created by the following API call: [source,console] ---- diff --git a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc index 93edc0918614d..c7844c520e074 100644 --- a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc @@ -2,7 +2,7 @@ Hybrid search between a semantic and lexical query can be achieved by using an <> as part of your search request. Provide a -`text_expansion` query and a full-text query as +`sparse_vector` query and a full-text query as <> for the `rrf` retriever. The `rrf` retriever uses <> to rank the top documents. @@ -25,11 +25,10 @@ GET my-index/_search { "standard": { "query": { - "text_expansion": { - "my_tokens": { - "model_id": ".elser_model_2", - "model_text": "the query string" - } + "sparse_vector": { + "field": "my_tokens", + "inference_id": "my-elser-endpoint", + "query": "the query string" } } } diff --git a/docs/reference/tab-widgets/semantic-search/search.asciidoc b/docs/reference/tab-widgets/semantic-search/search.asciidoc index 315328add07f0..eb83efff53f06 100644 --- a/docs/reference/tab-widgets/semantic-search/search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/search.asciidoc @@ -1,35 +1,33 @@ // tag::elser[] -ELSER text embeddings can be queried using a -<>. The text expansion -query enables you to query a rank features field or a sparse vector field, by -providing the model ID of the NLP model, and the query text: +ELSER text embeddings can be queried using a +<>. The sparse vector +query enables you to query a <> field, by +providing the inference ID associated with the NLP model you want to use, and the query text: [source,console] ---- GET my-index/_search { "query":{ - "text_expansion":{ - "my_tokens":{ <1> - "model_id":".elser_model_2", - "model_text":"the query string" - } + "sparse_vector": { + "field": "my_tokens", + "inference_id": "my-elser-endpoint", + "query": "the query string" } } } ---- // TEST[skip:TBD] -<1> The field of type `sparse_vector`. // end::elser[] // tag::dense-vector[] -Text embeddings produced by dense vector models can be queried using a -<>. In the `knn` clause, provide the name of the -dense vector field, and a `query_vector_builder` clause with the model ID and +Text embeddings produced by dense vector models can be queried using a +<>. In the `knn` clause, provide the name of the +dense vector field, and a `query_vector_builder` clause with the model ID and the query text. 
[source,console] @@ -41,9 +39,9 @@ GET my-index/_search "k": 10, "num_candidates": 100, "query_vector_builder": { - "text_embedding": { - "model_id": "sentence-transformers__msmarco-minilm-l-12-v3", - "model_text": "the query string" + "text_embedding": { + "model_id": "sentence-transformers__msmarco-minilm-l-12-v3", + "model_text": "the query string" } } } @@ -51,4 +49,4 @@ GET my-index/_search ---- // TEST[skip:TBD] -// end::dense-vector[] \ No newline at end of file +// end::dense-vector[] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java index 84d5dbdaaf536..256c90c3eaa62 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilder.java @@ -15,6 +15,8 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.AbstractQueryBuilder; @@ -29,7 +31,12 @@ import java.util.Objects; import java.util.Set; +/** + * @deprecated Replaced by sparse_vector query + */ +@Deprecated public class WeightedTokensQueryBuilder extends AbstractQueryBuilder { + public static final String NAME = "weighted_tokens"; public static final ParseField TOKENS_FIELD = new ParseField("tokens"); @@ -41,6 +48,10 @@ public class WeightedTokensQueryBuilder extends AbstractQueryBuilder ALLOWED_FIELD_TYPES = Set.of("sparse_vector", "rank_features"); + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ParseField.class); + public static final String WEIGHTED_TOKENS_DEPRECATION_MESSAGE = NAME + + " is deprecated and will be removed. 
Use sparse_vector instead."; + public WeightedTokensQueryBuilder(String fieldName, List tokens) { this(fieldName, tokens, null); } @@ -153,6 +164,9 @@ private static float parseWeight(String token, Object weight) { } public static WeightedTokensQueryBuilder fromXContent(XContentParser parser) throws IOException { + + deprecationLogger.critical(DeprecationCategory.API, NAME, WEIGHTED_TOKENS_DEPRECATION_MESSAGE); + String currentFieldName = null; String fieldName = null; List tokens = new ArrayList<>(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java index 43a531fcf8229..bb727204e2651 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java @@ -190,6 +190,30 @@ public void testToQuery() throws IOException { } } + @Override + public void testFromXContent() throws IOException { + super.testFromXContent(); + assertCriticalWarnings(WeightedTokensQueryBuilder.WEIGHTED_TOKENS_DEPRECATION_MESSAGE); + } + + @Override + public void testUnknownField() throws IOException { + super.testUnknownField(); + assertCriticalWarnings(WeightedTokensQueryBuilder.WEIGHTED_TOKENS_DEPRECATION_MESSAGE); + } + + @Override + public void testUnknownObjectException() throws IOException { + super.testUnknownObjectException(); + assertCriticalWarnings(WeightedTokensQueryBuilder.WEIGHTED_TOKENS_DEPRECATION_MESSAGE); + } + + @Override + public void testValidOutput() throws IOException { + super.testValidOutput(); + assertCriticalWarnings(WeightedTokensQueryBuilder.WEIGHTED_TOKENS_DEPRECATION_MESSAGE); + } + public void testPruningIsAppliedCorrectly() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { List documents = List.of( diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml index bfd4c5e8a831e..5cf0932f2fae2 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml @@ -455,10 +455,10 @@ teardown: query: rule: organic: - text_expansion: - ml.tokens: - model_id: text_expansion_model - model_text: "octopus comforter smells" + sparse_vector: + field: ml.tokens + inference_id: text_expansion_model + query: "octopus comforter smells" match_criteria: foo: bar ruleset_ids: @@ -473,10 +473,10 @@ teardown: query: rule: organic: - text_expansion: - ml.tokens: - model_id: text_expansion_model - model_text: "octopus comforter smells" + sparse_vector: + field: ml.tokens + inference_id: text_expansion_model + query: "octopus comforter smells" match_criteria: foo: baz ruleset_ids: @@ -491,10 +491,10 @@ teardown: query: rule: organic: - text_expansion: - ml.tokens: - model_id: text_expansion_model - model_text: "octopus comforter smells" + sparse_vector: + field: ml.tokens + inference_id: text_expansion_model + query: "octopus comforter smells" match_criteria: foo: puggle ruleset_ids: diff --git 
a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index 9987b43822cc0..f467691600766 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -52,27 +52,27 @@ setup: --- "Calculates text expansion and embedding results for new documents": - - do: - index: - index: test-index - id: doc_1 - body: - sparse_field: "inference test" - dense_field: "another inference test" - non_inference_field: "non inference test" - - - do: - get: - index: test-index - id: doc_1 - - - match: { _source.sparse_field.text: "inference test" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } - - match: { _source.dense_field.text: "another inference test" } - - exists: _source.dense_field.inference.chunks.0.embeddings - - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - - match: { _source.non_inference_field: "non inference test" } + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } --- "Inference fields do not create new mappings": @@ -89,10 +89,10 @@ setup: indices.get_mapping: index: test-index - - match: {test-index.mappings.properties.sparse_field.type: semantic_text} - - match: {test-index.mappings.properties.dense_field.type: semantic_text} - - match: {test-index.mappings.properties.non_inference_field.type: text} - - length: {test-index.mappings.properties: 3} + - match: { test-index.mappings.properties.sparse_field.type: semantic_text } + - match: { test-index.mappings.properties.dense_field.type: semantic_text } + - match: { test-index.mappings.properties.non_inference_field.type: text } + - length: { test-index.mappings.properties: 3 } --- "Sparse vector results are indexed as nested chunks and searchable": @@ -114,13 +114,13 @@ setup: nested: path: sparse_field.inference.chunks query: - text_expansion: - sparse_field.inference.chunks.embeddings: - model_id: sparse-inference-id - model_text: "you know, for testing" + sparse_vector: + field: sparse_field.inference.chunks.embeddings + inference_id: sparse-inference-id + query: "you know, for testing" - - match: { hits.total.value: 2 } - - match: { hits.total.relation: eq } + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } - length: { hits.hits.0._source.sparse_field.inference.chunks: 2 } - length: { hits.hits.1._source.sparse_field.inference.chunks: 2 } @@ -135,17 +135,17 @@ setup: path: sparse_field.inference.chunks inner_hits: _source: false - fields: [sparse_field.inference.chunks.text] + fields: [ 
sparse_field.inference.chunks.text ] query: - text_expansion: - sparse_field.inference.chunks.embeddings: - model_id: sparse-inference-id - model_text: "you know, for testing" + sparse_vector: + field: sparse_field.inference.chunks.embeddings + inference_id: sparse-inference-id + query: "you know, for testing" - - match: { hits.total.value: 2 } - - match: { hits.total.relation: eq } - - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.value: 2 } - - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.relation: eq } + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.relation: eq } - length: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } - length: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.1.fields.sparse_field\.inference\.chunks.0.text: 1 } @@ -180,8 +180,8 @@ setup: model_id: dense-inference-id model_text: "you know, for testing" - - match: { hits.total.value: 2 } - - match: { hits.total.relation: eq } + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } - length: { hits.hits.0._source.dense_field.inference.chunks: 2 } - length: { hits.hits.1._source.dense_field.inference.chunks: 2 } @@ -196,7 +196,7 @@ setup: path: dense_field.inference.chunks inner_hits: _source: false - fields: [dense_field.inference.chunks.text] + fields: [ dense_field.inference.chunks.text ] query: knn: field: dense_field.inference.chunks.embeddings @@ -205,10 +205,10 @@ setup: model_id: dense-inference-id model_text: "you know, for testing" - - match: { hits.total.value: 2 } - - match: { hits.total.relation: eq } - - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.value: 2 } - - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.relation: eq } + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.relation: eq } - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.1.fields.dense_field\.inference\.chunks.0.text: 1 } @@ -265,13 +265,13 @@ setup: index: destination-index id: doc_1 - - match: { _source.sparse_field.text: "inference test" } - - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } - - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } - - match: { _source.dense_field.text: "another inference test" } + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } + - match: { _source.dense_field.text: "another inference test" } - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } - match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } - - match: { _source.non_inference_field: "non inference test" } + - match: { _source.non_inference_field: "non inference test" } --- "Fails for non-existent 
inference": @@ -338,14 +338,14 @@ setup: index: test-copy-to-index id: doc_1 - - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.text: "inference test" } - length: { _source.sparse_field.inference.chunks: 3 } - - match: { _source.sparse_field.inference.chunks.0.text: "another copy_to inference test" } - - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.sparse_field.inference.chunks.1.text: "copy_to inference test" } - - exists: _source.sparse_field.inference.chunks.1.embeddings - - match: { _source.sparse_field.inference.chunks.2.text: "inference test" } - - exists: _source.sparse_field.inference.chunks.2.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "another copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.1.text: "copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.1.embeddings + - match: { _source.sparse_field.inference.chunks.2.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.2.embeddings --- "Calculates embeddings for bulk operations - index": @@ -455,8 +455,8 @@ setup: id: doc_1 - match: { _source.sparse_field.text: "updated inference test" } - - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } - exists: _source.sparse_field.inference.chunks.0.embeddings - - match: { _source.dense_field.text: "another updated inference test" } - - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } - exists: _source.dense_field.inference.chunks.0.embeddings diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/40_text_expansion.yml b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/40_text_expansion.yml index e1ec400ce64bd..2253ecfc79f6f 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/40_text_expansion.yml +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/40_text_expansion.yml @@ -131,6 +131,8 @@ teardown: --- "Test text expansion search": + - requires: + test_runner_features: [ "allowed_warnings" ] - do: search: index: index-with-sparse-vector @@ -140,13 +142,17 @@ teardown: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test text expansion search with pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -161,13 +167,17 @@ teardown: pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." 
+ - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test named, boosted text expansion search with pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: search: @@ -181,6 +191,9 @@ teardown: pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } - match: { hits.hits.0._score: 3.0 } @@ -199,15 +212,19 @@ teardown: tokens_weight_threshold: 0.4 _name: i-like-naming-my-queries boost: 100.0 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } - - match: { hits.hits.0.matched_queries: ["i-like-naming-my-queries"] } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } - match: { hits.hits.0._score: 300.0 } --- "Test text expansion search with default pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -219,14 +236,18 @@ teardown: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" - pruning_config: {} + pruning_config: { } + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test text expansion search with weighted tokens rescoring only pruned tokens": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -242,12 +263,16 @@ teardown: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 0 } --- "Test weighted tokens search": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -257,18 +282,22 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 1 tokens_weight_threshold: 0.4 only_score_pruned_tokens: false + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." 
+ - match: { hits.total.value: 5 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test weighted tokens search with default pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -278,15 +307,19 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] - pruning_config: {} + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] + pruning_config: { } + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." + - match: { hits.total.value: 5 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test weighted tokens search only scoring pruned tokens": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -296,17 +329,21 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." + - match: { hits.total.value: 0 } --- "Test weighted tokens search that prunes tokens based on frequency": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -316,9 +353,12 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"octopus":1.0}, {"comforter":1.0}, {"is": 1.0}, {"the": 1.0}, {"best": 1.0}, {"of": 1.0}, {"the": 1.0}, {"bunch": 1.0}] + tokens: [ { "the": 1.0 }, { "octopus": 1.0 }, { "comforter": 1.0 }, { "is": 1.0 }, { "the": 1.0 }, { "best": 1.0 }, { "of": 1.0 }, { "the": 1.0 }, { "bunch": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 3 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." + - match: { hits.total.value: 0 } diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/40_text_expansion.yml b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/40_text_expansion.yml index e1ec400ce64bd..2253ecfc79f6f 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/40_text_expansion.yml +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/40_text_expansion.yml @@ -131,6 +131,8 @@ teardown: --- "Test text expansion search": + - requires: + test_runner_features: [ "allowed_warnings" ] - do: search: index: index-with-sparse-vector @@ -140,13 +142,17 @@ teardown: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." 
+ - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test text expansion search with pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -161,13 +167,17 @@ teardown: pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test named, boosted text expansion search with pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: search: @@ -181,6 +191,9 @@ teardown: pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } - match: { hits.hits.0._score: 3.0 } @@ -199,15 +212,19 @@ teardown: tokens_weight_threshold: 0.4 _name: i-like-naming-my-queries boost: 100.0 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } - - match: { hits.hits.0.matched_queries: ["i-like-naming-my-queries"] } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } - match: { hits.hits.0._score: 300.0 } --- "Test text expansion search with default pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -219,14 +236,18 @@ teardown: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" - pruning_config: {} + pruning_config: { } + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test text expansion search with weighted tokens rescoring only pruned tokens": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -242,12 +263,16 @@ teardown: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 0 } --- "Test weighted tokens search": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -257,18 +282,22 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 1 tokens_weight_threshold: 0.4 only_score_pruned_tokens: false + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." 
+ - match: { hits.total.value: 5 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test weighted tokens search with default pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -278,15 +307,19 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] - pruning_config: {} + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] + pruning_config: { } + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." + - match: { hits.total.value: 5 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test weighted tokens search only scoring pruned tokens": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -296,17 +329,21 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." + - match: { hits.total.value: 0 } --- "Test weighted tokens search that prunes tokens based on frequency": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -316,9 +353,12 @@ teardown: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"octopus":1.0}, {"comforter":1.0}, {"is": 1.0}, {"the": 1.0}, {"best": 1.0}, {"of": 1.0}, {"the": 1.0}, {"bunch": 1.0}] + tokens: [ { "the": 1.0 }, { "octopus": 1.0 }, { "comforter": 1.0 }, { "is": 1.0 }, { "the": 1.0 }, { "best": 1.0 }, { "of": 1.0 }, { "the": 1.0 }, { "bunch": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 3 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." 
+ - match: { hits.total.value: 0 } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java index 26a18bc6d1d79..f1e8c9a67df44 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java @@ -8,8 +8,10 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.core.Strings; import org.elasticsearch.xpack.core.ml.utils.MapHelper; @@ -269,6 +271,8 @@ public void testSearchWithMissingModel() throws IOException { protected Response textExpansionSearch(String index, String modelText, String modelId, String fieldName) throws IOException { Request request = new Request("GET", index + "/_search?error_trace=true"); + // Handle REST deprecation for text_expansion query + request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE)); request.setJsonEntity(Strings.format(""" { @@ -281,6 +285,7 @@ protected Response textExpansionSearch(String index, String modelText, String mo } } }""", fieldName, modelId, modelText)); + return client().performRequest(request); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java index 587638e9ef7c9..6d972bcf5863a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java @@ -15,6 +15,8 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -41,6 +43,10 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ml.search.WeightedTokensQueryBuilder.PRUNING_CONFIG; +/** + * @deprecated Replaced by sparse_vector query + */ +@Deprecated public class TextExpansionQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "text_expansion"; @@ -53,6 +59,9 @@ public class TextExpansionQueryBuilder extends AbstractQueryBuilder weightedTokensSupplier; private final TokenPruningConfig tokenPruningConfig; + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ParseField.class); + public static final String TEXT_EXPANSION_DEPRECATION_MESSAGE = NAME + " is deprecated. 
Use sparse_vector instead."; + public TextExpansionQueryBuilder(String fieldName, String modelText, String modelId) { this(fieldName, modelText, modelId, null); } @@ -242,6 +251,9 @@ protected int doHashCode() { } public static TextExpansionQueryBuilder fromXContent(XContentParser parser) throws IOException { + + deprecationLogger.warn(DeprecationCategory.API, NAME, TEXT_EXPANSION_DEPRECATION_MESSAGE); + String fieldName = null; String modelText = null; String modelId = null; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index b086fef6f10f4..8da6fc843614e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -182,6 +182,30 @@ public void testToQuery() throws IOException { } } + @Override + public void testFromXContent() throws IOException { + super.testFromXContent(); + assertCriticalWarnings(TextExpansionQueryBuilder.TEXT_EXPANSION_DEPRECATION_MESSAGE); + } + + @Override + public void testUnknownField() throws IOException { + super.testUnknownField(); + assertCriticalWarnings(TextExpansionQueryBuilder.TEXT_EXPANSION_DEPRECATION_MESSAGE); + } + + @Override + public void testUnknownObjectException() throws IOException { + super.testUnknownObjectException(); + assertCriticalWarnings(TextExpansionQueryBuilder.TEXT_EXPANSION_DEPRECATION_MESSAGE); + } + + @Override + public void testValidOutput() throws IOException { + super.testValidOutput(); + assertCriticalWarnings(TextExpansionQueryBuilder.TEXT_EXPANSION_DEPRECATION_MESSAGE); + } + public void testIllegalValues() { { IllegalArgumentException e = expectThrows( diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml index 8dee722bbb185..21a5a4736675d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml @@ -96,6 +96,8 @@ setup: --- "Test text expansion search": + - requires: + test_runner_features: [ "allowed_warnings" ] - do: search: index: index-with-rank-features @@ -105,13 +107,17 @@ setup: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test text expansion search with pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -126,13 +132,17 @@ setup: pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." 
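Earlier in this patch, TextExpansionQueryIT keeps exercising the deprecated query by telling the low-level REST client to tolerate warning headers instead of failing the request. Isolated from the test, the pattern that hunk adds is just the following (a sketch; the URL and body are illustrative):

```
Request request = new Request("GET", "my-index/_search");
// WarningsHandler.PERMISSIVE: do not fail the request when the response
// carries deprecation Warning headers
request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE));
request.setJsonEntity("{ \"query\": { \"match_all\": {} } }");
Response response = client().performRequest(request);
```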
+ - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test named, boosted text expansion search with pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: search: @@ -146,6 +156,9 @@ setup: pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } - match: { hits.hits.0._score: 3.0 } @@ -164,15 +177,19 @@ setup: tokens_weight_threshold: 0.4 _name: i-like-naming-my-queries boost: 100.0 + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } - - match: { hits.hits.0.matched_queries: ["i-like-naming-my-queries"] } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } - match: { hits.hits.0._score: 300.0 } --- "Test text expansion search with default pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -184,14 +201,18 @@ setup: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" - pruning_config: {} + pruning_config: { } + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test text expansion search with weighted tokens rescoring only pruned tokens": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "pruning introduced in 8.13.0" - do: @@ -207,12 +228,16 @@ setup: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 0 } --- "Test weighted tokens search": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -222,18 +247,22 @@ setup: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 1 tokens_weight_threshold: 0.4 only_score_pruned_tokens: false + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." 
+ - match: { hits.total.value: 5 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test weighted tokens search with default pruning config": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -243,15 +272,19 @@ setup: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] - pruning_config: {} + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] + pruning_config: { } + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." + - match: { hits.total.value: 5 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } --- "Test weighted tokens search only scoring pruned tokens": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -261,17 +294,21 @@ setup: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + tokens: [ { "the": 1.0 }, { "comforter": 1.0 }, { "smells": 1.0 }, { "bad": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 4 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." + - match: { hits.total.value: 0 } --- "Test weighted tokens search that prunes tokens based on frequency": - requires: - cluster_features: ["gte_v8.13.0"] + test_runner_features: [ "allowed_warnings" ] + cluster_features: [ "gte_v8.13.0" ] reason: "weighted token search introduced in 8.13.0" - do: @@ -281,17 +318,20 @@ setup: query: weighted_tokens: ml.tokens: - tokens: [{"the": 1.0}, {"octopus":1.0}, {"comforter":1.0}, {"is": 1.0}, {"the": 1.0}, {"best": 1.0}, {"of": 1.0}, {"the": 1.0}, {"bunch": 1.0}] + tokens: [ { "the": 1.0 }, { "octopus": 1.0 }, { "comforter": 1.0 }, { "is": 1.0 }, { "the": 1.0 }, { "best": 1.0 }, { "of": 1.0 }, { "the": 1.0 }, { "bunch": 1.0 } ] pruning_config: tokens_freq_ratio_threshold: 3 tokens_weight_threshold: 0.4 only_score_pruned_tokens: true + allowed_warnings: + - "weighted_tokens is deprecated and will be removed. Use sparse_vector instead." 
+ - match: { hits.total.value: 0 } --- "Test text-expansion that displays error for invalid queried field type": - requires: - cluster_features: ["gte_v8.14.0"] + cluster_features: [ "gte_v8.14.0" ] reason: "validation for invalid field type introduced in 8.14.0" - do: @@ -304,4 +344,4 @@ setup: source_text: model_id: text_expansion_model model_text: "octopus comforter smells" - pruning_config: {} + pruning_config: { } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml index 7991566bfe818..6da86010205af 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_rank_features.yml @@ -98,6 +98,8 @@ setup: --- "Test text expansion search": + - requires: + test_runner_features: [ "allowed_warnings" ] - do: search: index: index-with-rank-features @@ -107,5 +109,8 @@ setup: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." + - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml index 50a3fa7e22d58..08ce51c8d17f9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search_sparse_vector.yml @@ -98,6 +98,8 @@ setup: --- "Test text expansion search": + - requires: + test_runner_features: [ "allowed_warnings" ] - do: search: index: index-with-rank-features @@ -107,5 +109,8 @@ setup: ml.tokens: model_id: text_expansion_model model_text: "octopus comforter smells" + allowed_warnings: + - "text_expansion is deprecated. Use sparse_vector instead." 
+ - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } From 2e748d1b9446837239ff8f45d5c110498debcb46 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 27 Jun 2024 19:35:03 +0100 Subject: [PATCH 023/216] Fix flaky ShardBulkInferenceActionFilterTests#testManyRandomDocs (#109839) --- muted-tests.yml | 3 --- .../filter/ShardBulkInferenceActionFilterTests.java | 8 ++++---- .../elasticsearch/xpack/inference/model/TestModel.java | 2 +- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index a2d6400bceeac..1ecb5436faabf 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,7 +1,4 @@ tests: -- class: "org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterTests" - issue: "https://github.com/elastic/elasticsearch/issues/108649" - method: "testManyRandomDocs" - class: "org.elasticsearch.cluster.coordination.CoordinatorVotingConfigurationTests" issue: "https://github.com/elastic/elasticsearch/issues/108729" method: "testClusterUUIDLogging" diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index d501c9a65d80e..f63a6369b21a6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -205,13 +205,13 @@ public void testItemFailures() throws Exception { @SuppressWarnings({ "unchecked", "rawtypes" }) public void testManyRandomDocs() throws Exception { Map inferenceModelMap = new HashMap<>(); - int numModels = randomIntBetween(1, 5); + int numModels = randomIntBetween(1, 3); for (int i = 0; i < numModels; i++) { StaticModel model = StaticModel.createRandomInstance(); inferenceModelMap.put(model.getInferenceEntityId(), model); } - int numInferenceFields = randomIntBetween(1, 5); + int numInferenceFields = randomIntBetween(1, 3); Map inferenceFieldMap = new HashMap<>(); for (int i = 0; i < numInferenceFields; i++) { String field = randomAlphaOfLengthBetween(5, 10); @@ -219,7 +219,7 @@ public void testManyRandomDocs() throws Exception { inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field })); } - int numRequests = randomIntBetween(100, 1000); + int numRequests = atLeast(100); BulkItemRequest[] originalRequests = new BulkItemRequest[numRequests]; BulkItemRequest[] modifiedRequests = new BulkItemRequest[numRequests]; for (int id = 0; id < numRequests; id++) { @@ -331,7 +331,7 @@ private static BulkItemRequest[] randomBulkItemRequest( for (var entry : fieldInferenceMap.values()) { String field = entry.getName(); var model = modelMap.get(entry.getInferenceId()); - String text = randomAlphaOfLengthBetween(10, 100); + String text = randomAlphaOfLengthBetween(10, 20); docMap.put(field, text); expectedDocMap.put(field, text); if (model == null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java index ced6e3ff43e2c..c454bd6ff9ce6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java @@ -37,7 +37,7 @@ public static TestModel createRandomInstance() { } public static TestModel createRandomInstance(TaskType taskType) { - var dimensions = taskType == TaskType.TEXT_EMBEDDING ? randomInt(1024) : null; + var dimensions = taskType == TaskType.TEXT_EMBEDDING ? randomInt(64) : null; var similarity = taskType == TaskType.TEXT_EMBEDDING ? randomFrom(SimilarityMeasure.values()) : null; return new TestModel( randomAlphaOfLength(4), From 76de97498362af31ec68e24e0c144ab0e452e843 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Fri, 28 Jun 2024 07:08:07 +1000 Subject: [PATCH 024/216] Mute org.elasticsearch.compute.lucene.ValueSourceReaderTypeConversionTests testLoadAll #110244 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1ecb5436faabf..a4c150124172e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -85,6 +85,9 @@ tests: - class: org.elasticsearch.upgrades.SecurityIndexRolesMetadataMigrationIT method: testMetadataMigratedAfterUpgrade issue: https://github.com/elastic/elasticsearch/issues/110232 +- class: org.elasticsearch.compute.lucene.ValueSourceReaderTypeConversionTests + method: testLoadAll + issue: https://github.com/elastic/elasticsearch/issues/110244 # Examples: # From ae5c767efc550eaf392128ab423ec3837f730b26 Mon Sep 17 00:00:00 2001 From: Ankita Kumar Date: Thu, 27 Jun 2024 15:30:27 -0700 Subject: [PATCH 025/216] Enhance comments for SubscribableListener (#110209) This PR enhances class level comments for the SubscribableListener class. Fixes issue #103833. --- .../action/support/SubscribableListener.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index 1dec470f8c140..41949c7ce3c22 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -31,8 +31,12 @@ import java.util.concurrent.Executor; /** - * An {@link ActionListener} to which other {@link ActionListener} instances can subscribe, such that when this listener is completed it - * fans-out its result to the subscribed listeners. + * An {@link ActionListener} to which other {@link ActionListener} instances can subscribe, such that when this listener is + * completed it fans-out its result to the subscribed listeners. + *
<p>
    + * If this listener is complete, {@link #addListener} completes the subscribing listener immediately + * with the result with which this listener was completed. Otherwise, the subscribing listener is retained + * and completed when this listener is completed. *
<p>
    * Exceptions are passed to subscribed listeners without modification. {@link ListenableActionFuture} and {@link ListenableFuture} are child * classes that provide additional exception handling. From 8edb3b07e7135ef509e146559879ef80bddba754 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Fri, 28 Jun 2024 09:07:20 +1000 Subject: [PATCH 026/216] Make repository analysis API available to non-operators (#110179) Closes #100381 --- docs/changelog/110179.yaml | 6 ++++++ .../operator-only-functionality.asciidoc | 1 - .../snapshot-restore/apis/repo-analysis-api.asciidoc | 3 --- .../elasticsearch/xpack/security/operator/Constants.java | 1 + .../xpack/security/operator/OperatorPrivilegesIT.java | 8 ++++++++ .../security/operator/DefaultOperatorOnlyRegistry.java | 2 -- 6 files changed, 15 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/110179.yaml diff --git a/docs/changelog/110179.yaml b/docs/changelog/110179.yaml new file mode 100644 index 0000000000000..b99a390c8586f --- /dev/null +++ b/docs/changelog/110179.yaml @@ -0,0 +1,6 @@ +pr: 110179 +summary: Make repository analysis API available to non-operators +area: Snapshot/Restore +type: enhancement +issues: + - 100318 diff --git a/docs/reference/security/operator-privileges/operator-only-functionality.asciidoc b/docs/reference/security/operator-privileges/operator-only-functionality.asciidoc index 9c1f5c9332706..5fc6add4d6396 100644 --- a/docs/reference/security/operator-privileges/operator-only-functionality.asciidoc +++ b/docs/reference/security/operator-privileges/operator-only-functionality.asciidoc @@ -21,7 +21,6 @@ given {es} version. * <> * <> * <> -* <> * <> * <> * <> diff --git a/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc b/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc index 2b2090405af60..f18ef1ee6e826 100644 --- a/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc @@ -39,9 +39,6 @@ POST /_snapshot/my_repository/_analyze?blob_count=10&max_blob_size=1mb&timeout=1 <> to use this API. For more information, see <>. -* If the <> is enabled, only operator -users can use this API. 
- [[repo-analysis-api-desc]] ==== {api-description-title} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 3dd8d780d6f82..878d01abd02e3 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -57,6 +57,7 @@ public class Constants { "cluster:admin/persistent/update_status", "cluster:admin/reindex/rethrottle", "cluster:admin/repository/_cleanup", + "cluster:admin/repository/analyze", "cluster:admin/repository/delete", "cluster:admin/repository/get", "cluster:admin/repository/put", diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesIT.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesIT.java index 6889c81664173..9e680688edeaa 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesIT.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/OperatorPrivilegesIT.java @@ -8,6 +8,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; @@ -325,6 +326,13 @@ public void testNonOperatorUserWillFailToCallDesiredNodesAPI() throws IOExceptio assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(403)); } + public void testNonOperatorUserCanCallAnalyzeRepositoryAPI() throws IOException { + createSnapshotRepo("testAnalysisRepo"); + var request = new Request("POST", "/_snapshot/testAnalysisRepo/_analyze"); + Response response = client().performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + } + private void createSnapshotRepo(String repoName) throws IOException { Request request = new Request("PUT", "/_snapshot/" + repoName); request.setJsonEntity( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java index 39813a2af5dfd..02dc32c4f3f63 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java @@ -39,8 +39,6 @@ public class DefaultOperatorOnlyRegistry implements OperatorOnlyRegistry { // Autoscaling does not publish its actions to core, literal strings are needed. "cluster:admin/autoscaling/put_autoscaling_policy", "cluster:admin/autoscaling/delete_autoscaling_policy", - // Repository analysis is not mentioned in core, a literal string is needed. 
- "cluster:admin/repository/analyze", // Node shutdown APIs are operator only "cluster:admin/shutdown/create", "cluster:admin/shutdown/get", From ef52f8ccb828a2e096b972e681fd53c2b3ab598b Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 27 Jun 2024 19:40:38 -0400 Subject: [PATCH 027/216] ESQL: Add Block#doesHaveMultivaluedFields (#110242) This adds a method to `Block` which returns `true` if it contains multivalued fields. This is like `mayHaveMultivaluedFields` except it won't make false positives - instead it'll always find a real multivalued field. We use this in LOOKUP right now, but we'll want it in apache arrow support soon too. --- .../compute/aggregation/table/RowInTableLookup.java | 9 ++------- .../compute/data/AbstractArrayBlock.java | 13 +++++++++++++ .../compute/data/AbstractVectorBlock.java | 5 +++++ .../java/org/elasticsearch/compute/data/Block.java | 13 ++++++++++++- .../elasticsearch/compute/data/CompositeBlock.java | 8 ++++++++ .../compute/data/ConstantNullBlock.java | 5 +++++ .../compute/data/OrdinalBytesRefBlock.java | 5 +++++ .../elasticsearch/compute/data/BasicBlockTests.java | 1 + .../compute/data/BlockMultiValuedTests.java | 3 +++ 9 files changed, 54 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/RowInTableLookup.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/RowInTableLookup.java index 1303fc701c595..4fa582e761e18 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/RowInTableLookup.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/table/RowInTableLookup.java @@ -45,13 +45,8 @@ public static RowInTableLookup build(BlockFactory blockFactory, Block[] keys) { "keys must have the same number of positions but [" + positions + "] != [" + keys[k].getPositionCount() + "]" ); } - if (keys[k].mayHaveMultivaluedFields()) { - for (int p = 0; p < keys[k].getPositionCount(); p++) { - if (keys[k].getValueCount(p) > 1) { - // TODO double check these errors over REST once we have LOOKUP - throw new IllegalArgumentException("only single valued keys are supported"); - } - } + if (keys[k].doesHaveMultivaluedFields()) { + throw new IllegalArgumentException("only single valued keys are supported"); } } if (positions == 0) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java index 9b56c2f6bd63f..22f8dded57320 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java @@ -46,6 +46,19 @@ public final boolean mayHaveMultivaluedFields() { return firstValueIndexes != null; } + @Override + public boolean doesHaveMultivaluedFields() { + if (false == mayHaveMultivaluedFields()) { + return false; + } + for (int p = 0; p < getPositionCount(); p++) { + if (getValueCount(p) > 1) { + return true; + } + } + return false; + } + @Override public final MvOrdering mvOrdering() { return mvOrdering; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java index fb52cc39f44d2..b70d738946048 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java @@ -46,6 +46,11 @@ public final boolean mayHaveMultivaluedFields() { return false; } + @Override + public boolean doesHaveMultivaluedFields() { + return false; + } + @Override public final MvOrdering mvOrdering() { return MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 282bc9064b308..302defa03473f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -116,10 +116,21 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R /** * Can this block have multivalued fields? Blocks that return {@code false} - * will never return more than one from {@link #getValueCount}. + * will never return more than one from {@link #getValueCount}. This may + * return {@code true} for Blocks that do not have multivalued fields, but + * it will always answer quickly. */ boolean mayHaveMultivaluedFields(); + /** + * Does this block have multivalued fields? Unlike {@link #mayHaveMultivaluedFields} + * this will never return a false positive. In other words, if this returns + * {@code true} then there are positions for which {@link #getValueCount} + * will return more than 1. This will answer quickly if it can but may have + * to check all positions. + */ + boolean doesHaveMultivaluedFields(); + /** * Creates a new block that only exposes the positions provided. * @param positions the positions to retain diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java index c107ea53bd7f4..8d7b8d57bbaa5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java @@ -132,6 +132,14 @@ public boolean mayHaveMultivaluedFields() { return Arrays.stream(blocks).anyMatch(Block::mayHaveMultivaluedFields); } + @Override + public boolean doesHaveMultivaluedFields() { + if (false == Arrays.stream(blocks).anyMatch(Block::mayHaveMultivaluedFields)) { + return false; + } + return Arrays.stream(blocks).anyMatch(Block::doesHaveMultivaluedFields); + } + @Override public CompositeBlock filter(int... 
positions) { CompositeBlock result = null; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 2c0f4c8946753..876cbc499bec6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -68,6 +68,11 @@ public boolean mayHaveMultivaluedFields() { return false; } + @Override + public boolean doesHaveMultivaluedFields() { + return false; + } + @Override public ElementType elementType() { return ElementType.NULL; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java index 321c319f06671..b8ac46e790acd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefBlock.java @@ -191,6 +191,11 @@ public boolean mayHaveMultivaluedFields() { return ordinals.mayHaveMultivaluedFields(); } + @Override + public boolean doesHaveMultivaluedFields() { + return ordinals.mayHaveMultivaluedFields(); + } + @Override public MvOrdering mvOrdering() { return ordinals.mvOrdering(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 81c32670289c2..311446f184ebf 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -184,6 +184,7 @@ static void assertSingleValueDenseBlock(Block initialBlock) { assertThat(block.mayHaveNulls(), is(false)); assertThat(block.areAllValuesNull(), is(false)); assertThat(block.mayHaveMultivaluedFields(), is(false)); + assertThat(block.doesHaveMultivaluedFields(), is(false)); initialBlock = block.asVector().asBlock(); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java index 89e44a1763b0f..c5e130726844d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java @@ -66,6 +66,7 @@ public void testMultiValued() { } assertThat(b.block().mayHaveMultivaluedFields(), equalTo(b.values().stream().anyMatch(l -> l != null && l.size() > 1))); + assertThat(b.block().doesHaveMultivaluedFields(), equalTo(b.values().stream().anyMatch(l -> l != null && l.size() > 1))); } finally { b.block().close(); } @@ -151,6 +152,8 @@ private void assertFiltered(boolean all, boolean shuffled) { filtered.close(); } assertThat(b.block().mayHaveMultivaluedFields(), equalTo(b.values().stream().anyMatch(l -> l != null && l.size() > 1))); + assertThat(b.block().doesHaveMultivaluedFields(), equalTo(b.values().stream().anyMatch(l -> l != null && l.size() > 1))); + } finally { b.block().close(); } From 6f72d4ccaad1580bd8138948138b94bcb369dc95 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Fri, 28 Jun 2024 11:01:49 
+1000 Subject: [PATCH 028/216] Handle response correctly when request already cancelled (#110249) Related to #109866 --- .../http/netty4/Netty4ChunkedContinuationsIT.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java index d2f7f6ab61977..4b6c820638b40 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java @@ -646,7 +646,11 @@ public void accept(RestChannel channel) { client.execute(TYPE, new Request(), new RestActionListener<>(channel) { @Override protected void processResponse(Response response) { - localRefs.mustIncRef(); + // incRef can fail if the request was already cancelled + if (localRefs.tryIncRef() == false) { + assert localRefs.hasReferences() == false : "tryIncRef failed but RefCounted not completed"; + return; + } channel.sendResponse(RestResponse.chunked(RestStatus.OK, response.getResponseBodyPart(), () -> { // cancellation notification only happens while processing a continuation, not while computing // the next one; prompt cancellation requires use of something like RestCancellableNodeClient From a099c9f7e676341b8e6bc74635993404d4dac34b Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Fri, 28 Jun 2024 08:42:24 +0200 Subject: [PATCH 029/216] Upgrade to Lucene-9.11.1 (#110234) --- build-tools-internal/version.properties | 2 +- docs/Versions.asciidoc | 4 +- docs/changelog/110234.yaml | 5 + gradle/verification-metadata.xml | 144 +++++++++--------- .../elasticsearch/index/IndexVersions.java | 2 +- 5 files changed, 81 insertions(+), 76 deletions(-) create mode 100644 docs/changelog/110234.yaml diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 12417239cc7dc..0fa6142789381 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.15.0 -lucene = 9.11.0 +lucene = 9.11.1 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index e8efa4c72589d..fb99ef498df17 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.11.0 -:lucene_version_path: 9_11_0 +:lucene_version: 9.11.1 +:lucene_version_path: 9_11_1 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/changelog/110234.yaml b/docs/changelog/110234.yaml new file mode 100644 index 0000000000000..0656ba5fb6636 --- /dev/null +++ b/docs/changelog/110234.yaml @@ -0,0 +1,5 @@ +pr: 110234 +summary: Upgrade to Lucene-9.11.1 +area: Search +type: upgrade +issues: [] diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 6e4beb0953b56..d8df128668b45 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2724,124 +2724,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index f08b97cd7033e..0035b54c63f8d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -110,7 +110,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion UPGRADE_TO_LUCENE_9_11 = def(8_508_00_0, Version.LUCENE_9_11_0); public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_00_0, Version.LUCENE_9_11_0); public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_00_0, Version.LUCENE_9_11_0); - + public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_00_0, Version.LUCENE_9_11_1); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ From 2f3f09fb1f6a19aa83945a9a4f40001d4c63436f Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Fri, 28 Jun 2024 08:49:08 +0200 Subject: [PATCH 030/216] Fix FsDirectoryFactoryTests (#110224) Unwrap the created directory to get access to original directory doing the work, so we are not affected by directory wrappers enabled by feature flags Resolves #109681, #110210, #110211 --- .../index/store/FsDirectoryFactoryTests.java | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index 2fdeda052381b..8bc90b3000dc8 100644 --- a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -49,7 +49,7 @@ public void testPreload() throws IOException { .build(); try (Directory directory = newDirectory(build)) { assertTrue(FsDirectoryFactory.isHybridFs(directory)); - FsDirectoryFactory.HybridDirectory hybridDirectory = (FsDirectoryFactory.HybridDirectory) directory; + FsDirectoryFactory.HybridDirectory hybridDirectory = (FsDirectoryFactory.HybridDirectory) FilterDirectory.unwrap(directory); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.dvd", newIOContext(random()))); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.nvd", newIOContext(random()))); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.tim", newIOContext(random()))); @@ -109,15 +109,16 @@ private void doTestPreload(String... 
preload) throws IOException { try (Directory dir = directory) { assertSame(dir, directory); // prevent warnings assertFalse(directory instanceof SleepingLockWrapper); + var mmapDirectory = FilterDirectory.unwrap(directory); if (preload.length == 0) { - assertTrue(directory.toString(), directory instanceof MMapDirectory); - assertFalse(((MMapDirectory) directory).getPreload()); + assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); + assertFalse(((MMapDirectory) mmapDirectory).getPreload()); } else if (Arrays.asList(preload).contains("*")) { - assertTrue(directory.toString(), directory instanceof MMapDirectory); - assertTrue(((MMapDirectory) directory).getPreload()); + assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); + assertTrue(((MMapDirectory) mmapDirectory).getPreload()); } else { - assertTrue(directory.toString(), directory instanceof FsDirectoryFactory.PreLoadMMapDirectory); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) directory; + assertTrue(directory.toString(), mmapDirectory instanceof FsDirectoryFactory.PreLoadMMapDirectory); + FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) mmapDirectory; for (String ext : preload) { assertTrue("ext: " + ext, preLoadMMapDirectory.useDelegate("foo." + ext)); assertTrue("ext: " + ext, preLoadMMapDirectory.getDelegate().getPreload()); @@ -166,7 +167,10 @@ private void doTestStoreDirectory(Path tempDir, String typeSettingValue, IndexMo assertTrue(type + " " + directory.toString(), directory instanceof NIOFSDirectory); break; case MMAPFS: - assertTrue(type + " " + directory.getClass().getName() + " " + directory, directory instanceof MMapDirectory); + assertTrue( + type + " " + directory.getClass().getName() + " " + directory, + FilterDirectory.unwrap(directory) instanceof MMapDirectory + ); break; case FS: if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { From 9b4d8430d76218c44722a32d1ef8ffd098c8db1b Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Fri, 28 Jun 2024 11:00:32 +0300 Subject: [PATCH 031/216] Document-level security test for synthetic source (#110188) Fixes #109804 --- ...cument_level_security_synthetic_source.yml | 403 ++++++++++++++++++ 1 file changed, 403 insertions(+) create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml new file mode 100644 index 0000000000000..769b9d848ba35 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz_api_keys/40_document_level_security_synthetic_source.yml @@ -0,0 +1,403 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow + +--- +Filter on single field: + - do: + indices.create: + index: index_dls + body: + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + + - do: + bulk: + index: index_dls + refresh: true + body: + - '{"create": { "_id": "1a" }}' + - '{"name": "A", "type": "foo"}' + - '{"create": { "_id": "2a" }}' + - '{"name": "B", "type": "bar"}' + - match: { errors: false } + + - do: + 
security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_dls" ] + privileges: [ "read" ] + query: + match: + name: A + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... + - do: + search: + index: index_dls + sort: name + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: 1a } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.type: foo } + - match: { hits.hits.1._id: 2a } + - match: { hits.hits.1._source.name: B } + - match: { hits.hits.1._source.type: bar } + + - do: + get: + index: index_dls + id: 2a + - match: { _source.name: B } + - match: { _source.type: bar } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_dls + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: 1a } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.type: foo } + + - do: + catch: missing + headers: + Authorization: "ApiKey ${credentials}" + get: + index: index_dls + id: 2a + +--- +Filter on nested field: + - do: + indices.create: + index: index_dls + body: + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + object: + type: nested + properties: + secret: + type: keyword + + - do: + bulk: + index: index_dls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "object": [ { "secret": "mission" }, { "secret": "nomatch" } ] }' + - '{"create": { }}' + - '{"name": "B", "object": { "secret": "mission", "public": "interest" } }' + - '{"create": { }}' + - '{"name": "C", "object": { "foo": "bar" } }' + - match: { errors: false } + + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_dls" ] + privileges: [ "read" ] + query: + nested: + path: object + query: + term: + object.secret: mission + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... 
+ - do: + search: + index: index_dls + sort: name + - match: { hits.total.value: 3 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.object.0.secret: mission } + - match: { hits.hits.0._source.object.1.secret: nomatch } + - match: { hits.hits.1._source.name: B } + - match: { hits.hits.1._source.object.secret: mission } + - match: { hits.hits.1._source.object.public: interest } + - match: { hits.hits.2._source.name: C } + - match: { hits.hits.2._source.object.foo: bar } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_dls + sort: name + - match: { hits.total.value: 2 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.object.0.secret: mission } + - match: { hits.hits.0._source.object.1.secret: nomatch } + - match: { hits.hits.1._source.name: B } + - match: { hits.hits.1._source.object.secret: mission } + - match: { hits.hits.1._source.object.public: interest } + +--- +Filter on object with stored source: + - do: + indices.create: + index: index_dls + body: + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + obj: + type: object + store_array_source: true + properties: + secret: + type: keyword + runtime: + secret: + type: keyword + script: + source: "emit(params._source.obj.0.secret)" + + - do: + bulk: + index: index_dls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "obj": [ { "secret": "mission" }, { "foo": "bar" } ] }' + - '{"create": { }}' + - '{"name": "B", "obj": [ { "secret": "common" }, {"foo": "baz"} ] }' + - match: { errors: false } + + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_dls" ] + privileges: [ "read" ] + query: + term: + secret: mission + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... 
+ - do: + search: + index: index_dls + sort: name + - match: { hits.total.value: 2 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.obj.0.secret: mission } + - match: { hits.hits.0._source.obj.1.foo: bar } + - match: { hits.hits.1._source.name: B } + - match: { hits.hits.1._source.obj.0.secret: common } + - match: { hits.hits.1._source.obj.1.foo: baz } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_dls + sort: name + - match: { hits.total.value: 1 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.obj.0.secret: mission } + - match: { hits.hits.0._source.obj.1.foo: bar } + + +--- +Filter on field within a disabled object: + - do: + indices.create: + index: index_dls + body: + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + object: + type: object + enabled: false + runtime: + secret: + type: keyword + script: + source: "emit(params._source.object.secret)" + + - do: + bulk: + index: index_dls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "object": { "secret":"mission", "public":"interest" } }' + - '{"create": { }}' + - '{"name": "B", "object": { "secret":"common", "foo":"bar" } }' + - match: { errors: false } + + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_dls" ] + privileges: [ "read", "monitor" ] + query: + term: + secret: mission + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... + - do: + search: + index: index_dls + sort: name + - match: { hits.total.value: 2 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.object.secret: mission } + - match: { hits.hits.0._source.object.public: interest } + - match: { hits.hits.1._source.name: B } + - match: { hits.hits.1._source.object.secret: common } + - match: { hits.hits.1._source.object.foo: bar } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_dls + sort: name + - match: { hits.total.value: 1 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.object.secret: mission } + - match: { hits.hits.0._source.object.public: interest } + + +--- +Filter on field with ignored_malformed: + - do: + indices.create: + index: index_dls + body: + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + secret: + type: integer + ignore_malformed: true + runtime: + rt_secret: + type: keyword + script: + source: "emit(params._source.secret)" + + - do: + bulk: + index: index_dls + refresh: true + body: + - '{"create": { }}' + - '{"name": "A", "secret": "mission"}' + - '{"create": { }}' + - '{"name": "B", "secret": "top" }' + - match: { errors: false } + + - do: + security.create_api_key: + body: + name: "test-fls" + expiration: "1d" + role_descriptors: + index_access: + indices: + - names: [ "index_dls" ] + privileges: [ "read" ] + query: + term: + rt_secret: mission + - match: { name: "test-fls" } + - is_true: id + - set: + id: api_key_id + encoded: credentials + + # With superuser... 
+ - do: + search: + index: index_dls + sort: name + - match: { hits.total.value: 2 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.secret: mission } + - match: { hits.hits.1._source.name: B } + - match: { hits.hits.1._source.secret: top } + + # With FLS API Key + - do: + headers: + Authorization: "ApiKey ${credentials}" + search: + index: index_dls + sort: name + - match: { hits.total.value: 1 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.secret: mission } From 0bc2b19ead83c1c9e4d9765048655e5aea084251 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 28 Jun 2024 11:15:35 +0200 Subject: [PATCH 032/216] Add AVX-512 optimised vector distance functions for int7 on x64 (#109084) * Add vec_caps and inner implementation for AVX-512-F (without VNNI) * select FNNI function name based on vec_caps; native templated implementation for manual unrolling * Switched compiler to clang for x64, as gcc has a bug --- docs/changelog/109084.yaml | 5 + libs/native/libraries/build.gradle | 2 +- .../nativeaccess/jdk/JdkVectorLibrary.java | 44 ++-- .../native/{Dockerfile => Dockerfile.aarch64} | 0 libs/simdvec/native/Dockerfile.amd64 | 16 ++ libs/simdvec/native/build.gradle | 17 +- libs/simdvec/native/publish_vec_binaries.sh | 6 +- libs/simdvec/native/src/vec/c/amd64/vec.c | 38 ++-- libs/simdvec/native/src/vec/c/amd64/vec_2.cpp | 201 ++++++++++++++++++ 9 files changed, 292 insertions(+), 37 deletions(-) create mode 100644 docs/changelog/109084.yaml rename libs/simdvec/native/{Dockerfile => Dockerfile.aarch64} (100%) create mode 100644 libs/simdvec/native/Dockerfile.amd64 create mode 100644 libs/simdvec/native/src/vec/c/amd64/vec_2.cpp diff --git a/docs/changelog/109084.yaml b/docs/changelog/109084.yaml new file mode 100644 index 0000000000000..67ff5610c5a66 --- /dev/null +++ b/docs/changelog/109084.yaml @@ -0,0 +1,5 @@ +pr: 109084 +summary: Add AVX-512 optimised vector distance functions for int7 on x64 +area: Search +type: enhancement +issues: [] diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index b7e6a1c704e6e..8f1a12055bd7e 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -18,7 +18,7 @@ configurations { } var zstdVersion = "1.5.5" -var vecVersion = "1.0.9" +var vecVersion = "1.0.10" repositories { exclusiveContent { diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java index db2e7b85c30d0..c92ad654c9b9a 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkVectorLibrary.java @@ -23,6 +23,9 @@ public final class JdkVectorLibrary implements VectorLibrary { + static final MethodHandle dot7u$mh; + static final MethodHandle sqr7u$mh; + static final VectorSimilarityFunctions INSTANCE; static { @@ -32,8 +35,33 @@ public final class JdkVectorLibrary implements VectorLibrary { try { int caps = (int) vecCaps$mh.invokeExact(); if (caps != 0) { + if (caps == 2) { + dot7u$mh = downcallHandle( + "dot7u_2", + FunctionDescriptor.of(JAVA_INT, ADDRESS, ADDRESS, JAVA_INT), + LinkerHelperUtil.critical() + ); + sqr7u$mh = downcallHandle( + "sqr7u_2", + FunctionDescriptor.of(JAVA_INT, ADDRESS, ADDRESS, JAVA_INT), + LinkerHelperUtil.critical() + ); + } else { + dot7u$mh = downcallHandle( + "dot7u", + 
FunctionDescriptor.of(JAVA_INT, ADDRESS, ADDRESS, JAVA_INT), + LinkerHelperUtil.critical() + ); + sqr7u$mh = downcallHandle( + "sqr7u", + FunctionDescriptor.of(JAVA_INT, ADDRESS, ADDRESS, JAVA_INT), + LinkerHelperUtil.critical() + ); + } INSTANCE = new JdkVectorSimilarityFunctions(); } else { + dot7u$mh = null; + sqr7u$mh = null; INSTANCE = null; } } catch (Throwable t) { @@ -49,18 +77,6 @@ public VectorSimilarityFunctions getVectorSimilarityFunctions() { } private static final class JdkVectorSimilarityFunctions implements VectorSimilarityFunctions { - - static final MethodHandle dot7u$mh = downcallHandle( - "dot7u", - FunctionDescriptor.of(JAVA_INT, ADDRESS, ADDRESS, JAVA_INT), - LinkerHelperUtil.critical() - ); - static final MethodHandle sqr7u$mh = downcallHandle( - "sqr7u", - FunctionDescriptor.of(JAVA_INT, ADDRESS, ADDRESS, JAVA_INT), - LinkerHelperUtil.critical() - ); - /** * Computes the dot product of given unsigned int7 byte vectors. * @@ -103,7 +119,7 @@ static int squareDistance7u(MemorySegment a, MemorySegment b, int length) { private static int dot7u(MemorySegment a, MemorySegment b, int length) { try { - return (int) dot7u$mh.invokeExact(a, b, length); + return (int) JdkVectorLibrary.dot7u$mh.invokeExact(a, b, length); } catch (Throwable t) { throw new AssertionError(t); } @@ -111,7 +127,7 @@ private static int dot7u(MemorySegment a, MemorySegment b, int length) { private static int sqr7u(MemorySegment a, MemorySegment b, int length) { try { - return (int) sqr7u$mh.invokeExact(a, b, length); + return (int) JdkVectorLibrary.sqr7u$mh.invokeExact(a, b, length); } catch (Throwable t) { throw new AssertionError(t); } diff --git a/libs/simdvec/native/Dockerfile b/libs/simdvec/native/Dockerfile.aarch64 similarity index 100% rename from libs/simdvec/native/Dockerfile rename to libs/simdvec/native/Dockerfile.aarch64 diff --git a/libs/simdvec/native/Dockerfile.amd64 b/libs/simdvec/native/Dockerfile.amd64 new file mode 100644 index 0000000000000..77acf8e42cdd2 --- /dev/null +++ b/libs/simdvec/native/Dockerfile.amd64 @@ -0,0 +1,16 @@ +FROM debian:latest + +RUN apt update +RUN apt install -y wget +RUN echo "deb http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" > /etc/apt/sources.list.d/clang.list +RUN wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc +RUN apt update +RUN apt install -y clang-18 openjdk-17-jdk +RUN ln -s /usr/bin/clang-18 /usr/bin/clang +RUN ln -s /usr/bin/clang++-18 /usr/bin/clang++ +COPY . /workspace +WORKDIR /workspace +RUN ./gradlew --quiet --console=plain clean buildSharedLibrary +RUN strip --strip-unneeded build/output/libvec.so + +CMD cat build/output/libvec.so diff --git a/libs/simdvec/native/build.gradle b/libs/simdvec/native/build.gradle index ef9120680646a..073477c3aebf2 100644 --- a/libs/simdvec/native/build.gradle +++ b/libs/simdvec/native/build.gradle @@ -6,14 +6,15 @@ * Side Public License, v 1. */ apply plugin: 'c' +apply plugin: 'cpp' var os = org.gradle.internal.os.OperatingSystem.current() // To update this library run publish_vec_binaries.sh ( or ./gradlew vecSharedLibrary ) // Or // For local development, build the docker image with: -// docker build --platform linux/arm64 --progress=plain . (for aarch64) -// docker build --platform linux/amd64 --progress=plain . (for x64) +// docker build --platform linux/arm64 --progress=plain --file=Dockerfile.aarch64 . (for aarch64) +// docker build --platform linux/amd64 --progress=plain --file=Dockerfile.amd64 . 
(for x64) // Grab the image id from the console output, then, e.g. // docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/$arch/libvec.so // @@ -51,6 +52,8 @@ model { target("amd64") { cCompiler.executable = "/usr/bin/gcc" cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2", "-Wno-incompatible-pointer-types"]) } + cppCompiler.executable = "/usr/bin/g++" + cppCompiler.withArguments { args -> args.addAll(["-O3", "-march=core-avx2"]) } } } cl(VisualCpp) { @@ -68,6 +71,7 @@ model { target("amd64") { cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2"]) } + cppCompiler.withArguments { args -> args.addAll(["-O3", "-march=core-avx2"]) } } } } @@ -86,6 +90,15 @@ model { srcDir "src/vec/headers/" } } + cpp { + source { + srcDir "src/vec/c/${platformName}/" + include "*.cpp" + } + exportedHeaders { + srcDir "src/vec/headers/" + } + } } } } diff --git a/libs/simdvec/native/publish_vec_binaries.sh b/libs/simdvec/native/publish_vec_binaries.sh index d11645ff71c4a..ddb3d2c71e448 100755 --- a/libs/simdvec/native/publish_vec_binaries.sh +++ b/libs/simdvec/native/publish_vec_binaries.sh @@ -19,7 +19,7 @@ if [ -z "$ARTIFACTORY_API_KEY" ]; then exit 1; fi -VERSION="1.0.9" +VERSION="1.0.10" ARTIFACTORY_REPOSITORY="${ARTIFACTORY_REPOSITORY:-https://artifactory.elastic.dev/artifactory/elasticsearch-native/}" TEMP=$(mktemp -d) @@ -33,11 +33,11 @@ echo 'Building Darwin binary...' echo 'Building Linux binary...' mkdir -p build/libs/vec/shared/aarch64/ -DOCKER_IMAGE=$(docker build --platform linux/arm64 --quiet .) +DOCKER_IMAGE=$(docker build --platform linux/arm64 --quiet --file=Dockerfile.aarch64 .) docker run $DOCKER_IMAGE > build/libs/vec/shared/aarch64/libvec.so echo 'Building Linux x64 binary...' -DOCKER_IMAGE=$(docker build --platform linux/amd64 --quiet .) +DOCKER_IMAGE=$(docker build --platform linux/amd64 --quiet --file=Dockerfile.amd64 .) mkdir -p build/libs/vec/shared/amd64 docker run --platform linux/amd64 $DOCKER_IMAGE > build/libs/vec/shared/amd64/libvec.so diff --git a/libs/simdvec/native/src/vec/c/amd64/vec.c b/libs/simdvec/native/src/vec/c/amd64/vec.c index c9a49ad2d1d4d..0fa17109fac6b 100644 --- a/libs/simdvec/native/src/vec/c/amd64/vec.c +++ b/libs/simdvec/native/src/vec/c/amd64/vec.c @@ -13,20 +13,16 @@ #include #include -#ifndef DOT7U_STRIDE_BYTES_LEN -#define DOT7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 -#endif - -#ifndef SQR7U_STRIDE_BYTES_LEN -#define SQR7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#ifndef STRIDE_BYTES_LEN +#define STRIDE_BYTES_LEN sizeof(__m256i) // Must be a power of 2 #endif #ifdef _MSC_VER #include -#elif __GNUC__ -#include #elif __clang__ #include +#elif __GNUC__ +#include #endif // Multi-platform CPUID "intrinsic"; it takes as input a "functionNumber" (or "leaf", the eax registry). 
"Subleaf" @@ -67,9 +63,19 @@ EXPORT int vec_caps() { if (functionIds >= 7) { cpuid(cpuInfo, 7); int ebx = cpuInfo[1]; + int ecx = cpuInfo[2]; // AVX2 flag is the 5th bit // We assume that all processors that have AVX2 also have FMA3 - return (ebx & (1 << 5)) != 0; + int avx2 = (ebx & 0x00000020) != 0; + int avx512 = (ebx & 0x00010000) != 0; + // int avx512_vnni = (ecx & 0x00000800) != 0; + // if (avx512 && avx512_vnni) { + if (avx512) { + return 2; + } + if (avx2) { + return 1; + } } return 0; } @@ -81,7 +87,7 @@ static inline int32_t dot7u_inner(int8_t* a, int8_t* b, size_t dims) { __m256i acc1 = _mm256_setzero_si256(); #pragma GCC unroll 4 - for(int i = 0; i < dims; i += DOT7U_STRIDE_BYTES_LEN) { + for(int i = 0; i < dims; i += STRIDE_BYTES_LEN) { // Load packed 8-bit integers __m256i va1 = _mm256_loadu_si256(a + i); __m256i vb1 = _mm256_loadu_si256(b + i); @@ -101,8 +107,8 @@ static inline int32_t dot7u_inner(int8_t* a, int8_t* b, size_t dims) { EXPORT int32_t dot7u(int8_t* a, int8_t* b, size_t dims) { int32_t res = 0; int i = 0; - if (dims > DOT7U_STRIDE_BYTES_LEN) { - i += dims & ~(DOT7U_STRIDE_BYTES_LEN - 1); + if (dims > STRIDE_BYTES_LEN) { + i += dims & ~(STRIDE_BYTES_LEN - 1); res = dot7u_inner(a, b, i); } for (; i < dims; i++) { @@ -118,7 +124,7 @@ static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { const __m256i ones = _mm256_set1_epi16(1); #pragma GCC unroll 4 - for(int i = 0; i < dims; i += SQR7U_STRIDE_BYTES_LEN) { + for(int i = 0; i < dims; i += STRIDE_BYTES_LEN) { // Load packed 8-bit integers __m256i va1 = _mm256_loadu_si256(a + i); __m256i vb1 = _mm256_loadu_si256(b + i); @@ -126,7 +132,6 @@ static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { const __m256i dist1 = _mm256_sub_epi8(va1, vb1); const __m256i abs_dist1 = _mm256_sign_epi8(dist1, dist1); const __m256i sqr1 = _mm256_maddubs_epi16(abs_dist1, abs_dist1); - acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, sqr1), acc1); } @@ -137,8 +142,8 @@ static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { int32_t res = 0; int i = 0; - if (dims > SQR7U_STRIDE_BYTES_LEN) { - i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); + if (dims > STRIDE_BYTES_LEN) { + i += dims & ~(STRIDE_BYTES_LEN - 1); res = sqr7u_inner(a, b, i); } for (; i < dims; i++) { @@ -147,4 +152,3 @@ EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { } return res; } - diff --git a/libs/simdvec/native/src/vec/c/amd64/vec_2.cpp b/libs/simdvec/native/src/vec/c/amd64/vec_2.cpp new file mode 100644 index 0000000000000..1606b31907405 --- /dev/null +++ b/libs/simdvec/native/src/vec/c/amd64/vec_2.cpp @@ -0,0 +1,201 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +#include +#include +#include "vec.h" + +#ifdef _MSC_VER +#include +#elif __clang__ +#pragma clang attribute push(__attribute__((target("arch=skylake-avx512"))), apply_to=function) +#include +#elif __GNUC__ +#pragma GCC push_options +#pragma GCC target ("arch=skylake-avx512") +#include +#endif + +#include +#include + +#ifndef STRIDE_BYTES_LEN +#define STRIDE_BYTES_LEN sizeof(__m512i) // Must be a power of 2 +#endif + +// Returns acc + ( p1 * p2 ), for 64-wide int lanes. 
+template <int offsetRegs>
+inline __m512i fma8(__m512i acc, const int8_t* p1, const int8_t* p2) {
+    constexpr int lanes = offsetRegs * STRIDE_BYTES_LEN;
+    const __m512i a = _mm512_loadu_si512((const __m512i*)(p1 + lanes));
+    const __m512i b = _mm512_loadu_si512((const __m512i*)(p2 + lanes));
+    // Perform multiplication and create 16-bit values
+    // Vertically multiply each unsigned 8-bit integer from a with the corresponding
+    // signed 8-bit integer from b, producing intermediate signed 16-bit integers.
+    // These values will be at max 32385, at min −32640
+    const __m512i dot = _mm512_maddubs_epi16(a, b);
+    const __m512i ones = _mm512_set1_epi16(1);
+    // Horizontally add adjacent pairs of intermediate signed 16-bit ints, and pack the results in 32-bit ints.
+    // Using madd with 1, as this is faster than extract 2 halves, add 16-bit ints, and convert to 32-bit ints.
+    return _mm512_add_epi32(_mm512_madd_epi16(ones, dot), acc);
+}
+
+static inline int32_t dot7u_inner_avx512(int8_t* a, int8_t* b, size_t dims) {
+    constexpr int stride8 = 8 * STRIDE_BYTES_LEN;
+    constexpr int stride4 = 4 * STRIDE_BYTES_LEN;
+    const int8_t* p1 = a;
+    const int8_t* p2 = b;
+
+    // Init accumulator(s) with 0
+    __m512i acc0 = _mm512_setzero_si512();
+    __m512i acc1 = _mm512_setzero_si512();
+    __m512i acc2 = _mm512_setzero_si512();
+    __m512i acc3 = _mm512_setzero_si512();
+    __m512i acc4 = _mm512_setzero_si512();
+    __m512i acc5 = _mm512_setzero_si512();
+    __m512i acc6 = _mm512_setzero_si512();
+    __m512i acc7 = _mm512_setzero_si512();
+
+    const int8_t* p1End = a + (dims & ~(stride8 - 1));
+    while (p1 < p1End) {
+        acc0 = fma8<0>(acc0, p1, p2);
+        acc1 = fma8<1>(acc1, p1, p2);
+        acc2 = fma8<2>(acc2, p1, p2);
+        acc3 = fma8<3>(acc3, p1, p2);
+        acc4 = fma8<4>(acc4, p1, p2);
+        acc5 = fma8<5>(acc5, p1, p2);
+        acc6 = fma8<6>(acc6, p1, p2);
+        acc7 = fma8<7>(acc7, p1, p2);
+        p1 += stride8;
+        p2 += stride8;
+    }
+
+    p1End = a + (dims & ~(stride4 - 1));
+    while (p1 < p1End) {
+        acc0 = fma8<0>(acc0, p1, p2);
+        acc1 = fma8<1>(acc1, p1, p2);
+        acc2 = fma8<2>(acc2, p1, p2);
+        acc3 = fma8<3>(acc3, p1, p2);
+        p1 += stride4;
+        p2 += stride4;
+    }
+
+    p1End = a + (dims & ~(STRIDE_BYTES_LEN - 1));
+    while (p1 < p1End) {
+        acc0 = fma8<0>(acc0, p1, p2);
+        p1 += STRIDE_BYTES_LEN;
+        p2 += STRIDE_BYTES_LEN;
+    }
+
+    // reduce (accumulate all)
+    acc0 = _mm512_add_epi32(_mm512_add_epi32(acc0, acc1), _mm512_add_epi32(acc2, acc3));
+    acc4 = _mm512_add_epi32(_mm512_add_epi32(acc4, acc5), _mm512_add_epi32(acc6, acc7));
+    return _mm512_reduce_add_epi32(_mm512_add_epi32(acc0, acc4));
+}
+
+extern "C"
+EXPORT int32_t dot7u_2(int8_t* a, int8_t* b, size_t dims) {
+    int32_t res = 0;
+    int i = 0;
+    if (dims > STRIDE_BYTES_LEN) {
+        i += dims & ~(STRIDE_BYTES_LEN - 1);
+        res = dot7u_inner_avx512(a, b, i);
+    }
+    for (; i < dims; i++) {
+        res += a[i] * b[i];
+    }
+    return res;
+}
+
+template <int offsetRegs>
+inline __m512i sqr8(__m512i acc, const int8_t* p1, const int8_t* p2) {
+    constexpr int lanes = offsetRegs * STRIDE_BYTES_LEN;
+    const __m512i a = _mm512_loadu_si512((const __m512i*)(p1 + lanes));
+    const __m512i b = _mm512_loadu_si512((const __m512i*)(p2 + lanes));
+
+    const __m512i dist = _mm512_sub_epi8(a, b);
+    const __m512i abs_dist = _mm512_abs_epi8(dist);
+    const __m512i sqr_add = _mm512_maddubs_epi16(abs_dist, abs_dist);
+    const __m512i ones = _mm512_set1_epi16(1);
+    // Horizontally add adjacent pairs of intermediate signed 16-bit integers, and pack the results.
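+    // (Note on the absolute value above: _mm512_maddubs_epi16 treats its first
+    // operand as unsigned, and for int7 inputs |a - b| <= 127, so squaring the
+    // absolute differences is exact in the signed 16-bit intermediates.)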
+ return _mm512_add_epi32(_mm512_madd_epi16(ones, sqr_add), acc); +} + +static inline int32_t sqr7u_inner_avx512(int8_t *a, int8_t *b, size_t dims) { + constexpr int stride8 = 8 * STRIDE_BYTES_LEN; + constexpr int stride4 = 4 * STRIDE_BYTES_LEN; + const int8_t* p1 = a; + const int8_t* p2 = b; + + // Init accumulator(s) with 0 + __m512i acc0 = _mm512_setzero_si512(); + __m512i acc1 = _mm512_setzero_si512(); + __m512i acc2 = _mm512_setzero_si512(); + __m512i acc3 = _mm512_setzero_si512(); + __m512i acc4 = _mm512_setzero_si512(); + __m512i acc5 = _mm512_setzero_si512(); + __m512i acc6 = _mm512_setzero_si512(); + __m512i acc7 = _mm512_setzero_si512(); + + const int8_t* p1End = a + (dims & ~(stride8 - 1)); + while (p1 < p1End) { + acc0 = sqr8<0>(acc0, p1, p2); + acc1 = sqr8<1>(acc1, p1, p2); + acc2 = sqr8<2>(acc2, p1, p2); + acc3 = sqr8<3>(acc3, p1, p2); + acc4 = sqr8<4>(acc4, p1, p2); + acc5 = sqr8<5>(acc5, p1, p2); + acc6 = sqr8<6>(acc6, p1, p2); + acc7 = sqr8<7>(acc7, p1, p2); + p1 += stride8; + p2 += stride8; + } + + p1End = a + (dims & ~(stride4 - 1)); + while (p1 < p1End) { + acc0 = sqr8<0>(acc0, p1, p2); + acc1 = sqr8<1>(acc1, p1, p2); + acc2 = sqr8<2>(acc2, p1, p2); + acc3 = sqr8<3>(acc3, p1, p2); + p1 += stride4; + p2 += stride4; + } + + p1End = a + (dims & ~(STRIDE_BYTES_LEN - 1)); + while (p1 < p1End) { + acc0 = sqr8<0>(acc0, p1, p2); + p1 += STRIDE_BYTES_LEN; + p2 += STRIDE_BYTES_LEN; + } + + // reduce (accumulate all) + acc0 = _mm512_add_epi32(_mm512_add_epi32(acc0, acc1), _mm512_add_epi32(acc2, acc3)); + acc4 = _mm512_add_epi32(_mm512_add_epi32(acc4, acc5), _mm512_add_epi32(acc6, acc7)); + return _mm512_reduce_add_epi32(_mm512_add_epi32(acc0, acc4)); +} + +extern "C" +EXPORT int32_t sqr7u_2(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > STRIDE_BYTES_LEN) { + i += dims & ~(STRIDE_BYTES_LEN - 1); + res = sqr7u_inner_avx512(a, b, i); + } + for (; i < dims; i++) { + int32_t dist = a[i] - b[i]; + res += dist * dist; + } + return res; +} + +#ifdef __clang__ +#pragma clang attribute pop +#elif __GNUC__ +#pragma GCC pop_options +#endif From 2876e059f3728565b28517b351c744520827e797 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 28 Jun 2024 11:47:30 +0200 Subject: [PATCH 033/216] Aggs: Improve scripted metric agg allow list tests (#110153) * Add an override to the aggs tests to override the allow list default setting. This makes it possible to run the scripted metric aggs tests on Serverless, even when we disallow these aggs per default on Serverless. * Move the allow list tests next to the scripted metric tests since these belong together. 
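Condensed into a standalone sketch (all names are taken from the diff below;
this is not new behaviour, just the gating logic in one place):

    boolean disableAllowListPerDefault = Boolean.parseBoolean(
        System.getProperty("tests.disable_scripted_metric_allow_list_per_default")
    );
    var spec = ElasticsearchCluster.local().module("aggregations").module("lang-painless");
    // Serverless runners set the property; Boolean.parseBoolean(null) is false,
    // so stock runs build the cluster without the extra setting.
    ElasticsearchCluster cluster = disableAllowListPerDefault
        ? spec.setting("search.aggs.only_allowed_metric_scripts", "false").build()
        : spec.build();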
--- modules/aggregations/build.gradle | 2 +- .../AggregationsClientYamlTestSuiteIT.java | 20 ++++++++++++++----- .../scripted_metric_allow_list.yml | 0 qa/ccs-common-rest/build.gradle | 3 +-- 4 files changed, 17 insertions(+), 8 deletions(-) rename rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_disallow_scripted_metrics.yml => modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/scripted_metric_allow_list.yml (100%) diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index 2724ac8ba1139..a773c751eeaf5 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -19,7 +19,7 @@ esplugin { restResources { restApi { - include '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute' + include '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute', 'put_script' } restTests { // Pulls in all aggregation tests from core AND the forwards v7's core for forwards compatibility diff --git a/modules/aggregations/src/yamlRestTest/java/org/elasticsearch/aggregations/AggregationsClientYamlTestSuiteIT.java b/modules/aggregations/src/yamlRestTest/java/org/elasticsearch/aggregations/AggregationsClientYamlTestSuiteIT.java index a3c737e2795d8..33cb223569b9b 100644 --- a/modules/aggregations/src/yamlRestTest/java/org/elasticsearch/aggregations/AggregationsClientYamlTestSuiteIT.java +++ b/modules/aggregations/src/yamlRestTest/java/org/elasticsearch/aggregations/AggregationsClientYamlTestSuiteIT.java @@ -18,11 +18,21 @@ public class AggregationsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .module("aggregations") - .module("lang-painless") - .feature(FeatureFlag.TIME_SERIES_MODE) - .build(); + public static ElasticsearchCluster cluster = makeCluster(); + + private static ElasticsearchCluster makeCluster() { + var cluster = ElasticsearchCluster.local().module("aggregations").module("lang-painless").feature(FeatureFlag.TIME_SERIES_MODE); + + // On Serverless, we want to disallow scripted metrics aggs per default. + // The following override allows us to still run the scripted metrics agg tests without breaking bwc. 
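+        // (Boolean.parseBoolean(null) returns false, so when the property is not
+        // set we fall through and build the stock cluster below.)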
+ boolean disableAllowListPerDefault = Boolean.parseBoolean( + System.getProperty("tests.disable_scripted_metric_allow_list_per_default") + ); + if (disableAllowListPerDefault) { + return cluster.setting("search.aggs.only_allowed_metric_scripts", "false").build(); + } + return cluster.build(); + } public AggregationsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_disallow_scripted_metrics.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/scripted_metric_allow_list.yml similarity index 100% rename from rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_disallow_scripted_metrics.yml rename to modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/scripted_metric_allow_list.yml diff --git a/qa/ccs-common-rest/build.gradle b/qa/ccs-common-rest/build.gradle index 82fe7c48d87f8..e5e8c5a489d5b 100644 --- a/qa/ccs-common-rest/build.gradle +++ b/qa/ccs-common-rest/build.gradle @@ -11,8 +11,7 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' restResources { restApi { include '_common', 'bulk', 'count', 'cluster', 'field_caps', 'get', 'knn_search', 'index', 'indices', 'msearch', - 'search', 'async_search', 'graph', '*_point_in_time', 'info', 'scroll', 'clear_scroll', 'search_mvt', 'eql', 'sql', - 'put_script' + 'search', 'async_search', 'graph', '*_point_in_time', 'info', 'scroll', 'clear_scroll', 'search_mvt', 'eql', 'sql' } restTests { includeCore 'field_caps', 'msearch', 'search', 'suggest', 'scroll', "indices.resolve_index" From 64505b4c3362b91fccbf16796951b34dfc83d57b Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 28 Jun 2024 13:02:11 +0200 Subject: [PATCH 034/216] Fix CoordinatorVotingConfigurationTests.testClusterUUIDLogging to add (#110256) log expectation before creating cluster When randomly picking a single node cluster, the cluster is formed before adding the log expectation and the expected log line can't be observed any more. 
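In sketch form, the reordering applied below is (the expectation arguments are
elided here, see the diff):

    try (var mockLog = MockLog.capture(ClusterBootstrapService.class)) {
        // register the expectation before any node can emit the log line
        mockLog.addExpectation(new MockLog.SeenEventExpectation(/* see diff */));
        try (var cluster = new Cluster(randomIntBetween(1, 3))) {
            cluster.runRandomly();
            cluster.stabilise();
            mockLog.assertAllExpectationsMatched();
        }
    }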
Fixes #108729 --- muted-tests.yml | 3 -- .../CoordinatorVotingConfigurationTests.java | 42 ++++++++++--------- 2 files changed, 22 insertions(+), 23 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index a4c150124172e..f4a0695b6f19e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,7 +1,4 @@ tests: -- class: "org.elasticsearch.cluster.coordination.CoordinatorVotingConfigurationTests" - issue: "https://github.com/elastic/elasticsearch/issues/108729" - method: "testClusterUUIDLogging" - class: "org.elasticsearch.xpack.textstructure.structurefinder.TimestampFormatFinderTests" issue: "https://github.com/elastic/elasticsearch/issues/108855" method: "testGuessIsDayFirstFromLocale" diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorVotingConfigurationTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorVotingConfigurationTests.java index 7f665cf241230..49c5060240809 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorVotingConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorVotingConfigurationTests.java @@ -458,7 +458,7 @@ public void testSettingInitialConfigurationTriggersElection() { value = "org.elasticsearch.cluster.coordination.ClusterBootstrapService:INFO" ) public void testClusterUUIDLogging() { - try (var mockLog = MockLog.capture(ClusterBootstrapService.class); var cluster = new Cluster(randomIntBetween(1, 3))) { + try (var mockLog = MockLog.capture(ClusterBootstrapService.class)) { mockLog.addExpectation( new MockLog.SeenEventExpectation( "fresh node message", @@ -468,25 +468,27 @@ public void testClusterUUIDLogging() { ) ); - cluster.runRandomly(); - cluster.stabilise(); - mockLog.assertAllExpectationsMatched(); - - final var restartingNode = cluster.getAnyNode(); - mockLog.addExpectation( - new MockLog.SeenEventExpectation( - "restarted node message", - ClusterBootstrapService.class.getCanonicalName(), - Level.INFO, - "this node is locked into cluster UUID [" - + restartingNode.getLastAppliedClusterState().metadata().clusterUUID() - + "] and will not attempt further cluster bootstrapping" - ) - ); - restartingNode.close(); - cluster.clusterNodes.replaceAll(cn -> cn == restartingNode ? cn.restartedNode() : cn); - cluster.stabilise(); - mockLog.assertAllExpectationsMatched(); + try (var cluster = new Cluster(randomIntBetween(1, 3))) { + cluster.runRandomly(); + cluster.stabilise(); + mockLog.assertAllExpectationsMatched(); + + final var restartingNode = cluster.getAnyNode(); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "restarted node message", + ClusterBootstrapService.class.getCanonicalName(), + Level.INFO, + "this node is locked into cluster UUID [" + + restartingNode.getLastAppliedClusterState().metadata().clusterUUID() + + "] and will not attempt further cluster bootstrapping" + ) + ); + restartingNode.close(); + cluster.clusterNodes.replaceAll(cn -> cn == restartingNode ? 
cn.restartedNode() : cn); + cluster.stabilise(); + mockLog.assertAllExpectationsMatched(); + } } } From df47e26fc6e108c01761accbff54e19f40fd49f5 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 28 Jun 2024 13:09:13 +0200 Subject: [PATCH 035/216] Fix ClassCastException with MV_EXPAND on missing field (#110096) --- docs/changelog/110096.yaml | 6 ++++ .../optimizer/LocalLogicalPlanOptimizer.java | 7 ++++ .../LocalLogicalPlanOptimizerTests.java | 33 +++++++++++++++++++ 3 files changed, 46 insertions(+) create mode 100644 docs/changelog/110096.yaml diff --git a/docs/changelog/110096.yaml b/docs/changelog/110096.yaml new file mode 100644 index 0000000000000..3d6616c289266 --- /dev/null +++ b/docs/changelog/110096.yaml @@ -0,0 +1,6 @@ +pr: 110096 +summary: Fix `ClassCastException` with MV_EXPAND on missing field +area: ES|QL +type: bug +issues: + - 109974 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java index 384d3a8cea840..90ce68cb55b64 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; @@ -162,6 +163,12 @@ else if (plan instanceof Project project) { plan = new Eval(project.source(), project.child(), new ArrayList<>(nullLiteral.values())); plan = new Project(project.source(), plan, newProjections); } + } else if (plan instanceof MvExpand) { + // We cannot replace the target (NamedExpression) with a Literal + // https://github.com/elastic/elasticsearch/issues/109974 + // Unfortunately we cannot remove the MvExpand right away, or we'll lose the output field (layout problems) + // TODO but this could be a follow-up optimization + return plan; } // otherwise transform fields in place else { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 40c45a288ae88..7a3ed09d66f02 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -34,7 +34,9 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -181,6 +183,37 @@ public void testMissingFieldInSort() { var source = 
as(limit.child(), EsRelation.class); } + /** + * Expects + * EsqlProject[[first_name{f}#6]] + * \_Limit[1000[INTEGER]] + * \_MvExpand[last_name{f}#9,last_name{r}#15] + * \_Limit[1000[INTEGER]] + * \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, ge..] + */ + public void testMissingFieldInMvExpand() { + var plan = plan(""" + from test + | mv_expand last_name + | keep first_name, last_name + """); + + var testStats = statsForMissingField("last_name"); + var localPlan = localPlan(plan, testStats); + + var project = as(localPlan, EsqlProject.class); + var projections = project.projections(); + assertThat(Expressions.names(projections), contains("first_name", "last_name")); + + var limit = as(project.child(), Limit.class); + // MvExpand cannot be optimized (yet) because the target NamedExpression cannot be replaced with a NULL literal + // https://github.com/elastic/elasticsearch/issues/109974 + // See LocalLogicalPlanOptimizer.ReplaceMissingFieldWithNull + var mvExpand = as(limit.child(), MvExpand.class); + var limit2 = as(mvExpand.child(), Limit.class); + as(limit2.child(), EsRelation.class); + } + /** * Expects * EsqlProject[[x{r}#3]] From 883f76bd93a2bcef9b52ffb7dd89f993acb8cc67 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Fri, 28 Jun 2024 15:12:51 +0300 Subject: [PATCH 036/216] Fixing RankFeature tests in SearchServiceTests - ensuring waiting for assertions to complete (#110258) --- .../java/org/elasticsearch/search/SearchServiceTests.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 7aa894f0e8aed..4609c7327c798 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -596,7 +596,7 @@ public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) // execute fetch phase and perform any validations once we retrieve the response // the difference in how we do assertions here is needed because once the transport service sends back the response // it decrements the reference to the FetchSearchResult (through the ActionListener#respondAndRelease) and sets hits to null - service.executeFetchPhase(fetchRequest, searchTask, new ActionListener<>() { + PlainActionFuture fetchListener = new PlainActionFuture<>() { @Override public void onResponse(FetchSearchResult fetchSearchResult) { assertNotNull(fetchSearchResult); @@ -610,13 +610,17 @@ public void onResponse(FetchSearchResult fetchSearchResult) { assertNotNull(hit.getFields().get(fetchFieldName)); assertEquals(hit.getFields().get(fetchFieldName).getValue(), fetchFieldValue + "_" + hit.docId()); } + super.onResponse(fetchSearchResult); } @Override public void onFailure(Exception e) { + super.onFailure(e); throw new AssertionError("No failure should have been raised", e); } - }); + }; + service.executeFetchPhase(fetchRequest, searchTask, fetchListener); + fetchListener.get(); } catch (Exception ex) { if (queryResult != null) { if (queryResult.hasReferences()) { From 959d07f5ee848f3b6c5507cf1ba13f316f622ff9 Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Fri, 28 Jun 2024 08:19:08 -0400 Subject: [PATCH 037/216] Rename query rules namespace in rest api spec (#110208) * Rename query rules namespace in rest api spec * Rename per Specification PR feedback --- ...lete.json => query_rules.delete_rule.json} | 2 +- ...e.json => 
query_rules.delete_ruleset.json} | 2 +- ...ule.get.json => query_rules.get_rule.json} | 2 +- ....get.json => query_rules.get_ruleset.json} | 2 +- ...st.json => query_rules.list_rulesets.json} | 2 +- ...ule.put.json => query_rules.put_rule.json} | 2 +- ....put.json => query_rules.put_ruleset.json} | 2 +- x-pack/plugin/ent-search/qa/rest/build.gradle | 3 +- .../rest-api-spec/test/entsearch/20_usage.yml | 30 ++++++++-------- .../entsearch/rules/10_query_ruleset_put.yml | 14 ++++---- .../entsearch/rules/20_query_ruleset_list.yml | 34 +++++++++---------- .../rules/30_query_ruleset_delete.yml | 10 +++--- .../entsearch/rules/40_rule_query_search.yml | 14 ++++---- .../entsearch/rules/50_query_rule_put.yml | 32 ++++++++--------- .../rules/5_query_rulesets_before_setup.yml | 14 ++++---- .../entsearch/rules/60_query_rule_delete.yml | 32 ++++++++--------- 16 files changed, 98 insertions(+), 99 deletions(-) rename rest-api-spec/src/main/resources/rest-api-spec/api/{query_rule.delete.json => query_rules.delete_rule.json} (96%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{query_ruleset.delete.json => query_rules.delete_ruleset.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{query_rule.get.json => query_rules.get_rule.json} (96%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{query_ruleset.get.json => query_rules.get_ruleset.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{query_ruleset.list.json => query_rules.list_rulesets.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{query_rule.put.json => query_rules.put_rule.json} (97%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{query_ruleset.put.json => query_rules.put_ruleset.json} (96%) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_rule.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.delete.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_rule.json index 8d68a9f2ee537..8a97dcd311237 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_rule.json @@ -1,5 +1,5 @@ { - "query_rule.delete": { + "query_rules.delete_rule": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-rule.html", "description": "Deletes an individual query rule within a ruleset." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_ruleset.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.delete.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_ruleset.json index 8753c50c4252b..90144ca9f3cf5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.delete_ruleset.json @@ -1,5 +1,5 @@ { - "query_ruleset.delete": { + "query_rules.delete_ruleset": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-ruleset.html", "description": "Deletes a query ruleset." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_rule.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.get.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_rule.json index 874214705ed4c..681b68ab583d8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_rule.json @@ -1,5 +1,5 @@ { - "query_rule.get": { + "query_rules.get_rule": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-rule.html", "description": "Returns the details about an individual query rule within a ruleset." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_ruleset.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.get.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_ruleset.json index c492538c2d076..28268ea667b8c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.get_ruleset.json @@ -1,5 +1,5 @@ { - "query_ruleset.get": { + "query_rules.get_ruleset": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-ruleset.html", "description": "Returns the details about a query ruleset." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.list_rulesets.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.list.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.list_rulesets.json index 0516e6578025f..e3e98adedb147 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.list_rulesets.json @@ -1,5 +1,5 @@ { - "query_ruleset.list": { + "query_rules.list_rulesets": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-query-rulesets.html", "description": "Lists query rulesets." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_rule.json similarity index 97% rename from rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.put.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_rule.json index eba6a923eae3b..5cc21b4f3249c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_rule.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_rule.json @@ -1,5 +1,5 @@ { - "query_rule.put": { + "query_rules.put_rule": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-rule.html", "description": "Creates or updates a query rule within a ruleset." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_ruleset.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.put.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_ruleset.json index 6dcd6c24b15bf..12cbccc6b7651 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/query_ruleset.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/query_rules.put_ruleset.json @@ -1,5 +1,5 @@ { - "query_ruleset.put": { + "query_rules.put_ruleset": { "documentation": { "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-ruleset.html", "description": "Creates or updates a query ruleset." diff --git a/x-pack/plugin/ent-search/qa/rest/build.gradle b/x-pack/plugin/ent-search/qa/rest/build.gradle index c24b0ffd44c65..e47bcf82f0f8c 100644 --- a/x-pack/plugin/ent-search/qa/rest/build.gradle +++ b/x-pack/plugin/ent-search/qa/rest/build.gradle @@ -14,8 +14,7 @@ restResources { 'nodes', 'indices', 'index', - 'query_ruleset', - 'query_rule', + 'query_rules', 'search_application', 'xpack', 'security', diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml index 2d7b56bc175eb..81eaa24fd6f5d 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml @@ -28,7 +28,7 @@ teardown: --- "xpack usage includes Enterprise Search": - do: - xpack.usage: {} + xpack.usage: { } - match: { enterprise_search: { @@ -36,7 +36,7 @@ teardown: available: true, search_applications: { count: 0 }, analytics_collections: { count: 0 }, - query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } + query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } } } @@ -53,7 +53,7 @@ teardown: query: "{{query_string}}" - do: - xpack.usage: {} + xpack.usage: { } - match: { enterprise_search: { @@ -61,7 +61,7 @@ teardown: available: true, search_applications: { count: 1 }, analytics_collections: { count: 0 }, - query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } + query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } } } @@ -82,7 +82,7 @@ teardown: name: test-analytics-collection - do: - xpack.usage: {} + xpack.usage: { } - match: { enterprise_search: { @@ -90,7 +90,7 @@ teardown: available: true, search_applications: { count: 2 }, analytics_collections: { count: 1 }, - query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } + query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } } } @@ -99,7 +99,7 @@ teardown: name: test-search-application-2 - do: - xpack.usage: {} + xpack.usage: { } - match: { enterprise_search: { @@ -107,7 +107,7 @@ teardown: available: true, search_applications: { count: 1 }, analytics_collections: { count: 1 }, - query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } + query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } } } @@ -116,7 +116,7 @@ teardown: name: 
test-analytics-collection - do: - xpack.usage: {} + xpack.usage: { } - match: { enterprise_search: { @@ -124,12 +124,12 @@ teardown: available: true, search_applications: { count: 1 }, analytics_collections: { count: 0 }, - query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } + query_rulesets: { total_count: 0, total_rule_count: 0, min_rule_count: 0, max_rule_count: 0 } } } - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset body: rules: @@ -155,7 +155,7 @@ teardown: - 'id4' - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset2 body: rules: @@ -199,12 +199,12 @@ teardown: available: true, search_applications: { count: 1 }, analytics_collections: { count: 0 }, - query_rulesets: { total_count: 2, total_rule_count: 5, min_rule_count: 2, max_rule_count: 3, rule_criteria_total_counts: { exact: 5 } } + query_rulesets: { total_count: 2, total_rule_count: 5, min_rule_count: 2, max_rule_count: 3, rule_criteria_total_counts: { exact: 5 } } } } - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset2 - do: @@ -216,7 +216,7 @@ teardown: available: true, search_applications: { count: 1 }, analytics_collections: { count: 0 }, - query_rulesets: { total_count: 1, total_rule_count: 2, min_rule_count: 2, max_rule_count: 2, rule_criteria_total_counts: { exact: 2 } } + query_rulesets: { total_count: 1, total_rule_count: 2, min_rule_count: 2, max_rule_count: 2, rule_criteria_total_counts: { exact: 2 } } } } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml index f3f37e41ec756..a1f9eeccf2002 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/10_query_ruleset_put.yml @@ -6,19 +6,19 @@ setup: --- teardown: - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-ruleset ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-recreating ignore: 404 --- 'Create Query Ruleset': - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-ruleset body: rules: @@ -48,7 +48,7 @@ teardown: - match: { result: 'created' } - do: - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-ruleset - match: { ruleset_id: test-ruleset } - match: @@ -79,7 +79,7 @@ teardown: --- 'Create Query Ruleset - Resource already exists': - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset-recreating body: rules: @@ -96,7 +96,7 @@ teardown: - match: { result: 'created' } - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset-recreating body: rules: @@ -120,7 +120,7 @@ teardown: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: forbidden-query-ruleset body: rules: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml index b30f1c2418f4f..f2ced956b5369 
100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/20_query_ruleset_list.yml @@ -3,7 +3,7 @@ setup: cluster_features: [ "gte_v8.10.0" ] reason: Introduced in 8.10.0 - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset-3 body: rules: @@ -29,7 +29,7 @@ setup: - 'id4' - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset-1 body: rules: @@ -65,7 +65,7 @@ setup: - 'id6' - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset-2 body: rules: @@ -112,29 +112,29 @@ setup: --- teardown: - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-1 ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-2 ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-3 ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: a-test-query-ruleset-with-lots-of-criteria ignore: 404 --- "List Query Rulesets": - do: - query_ruleset.list: { } + query_rules.list_rulesets: { } - match: { count: 3 } @@ -154,7 +154,7 @@ teardown: --- "List Query Rulesets - with from": - do: - query_ruleset.list: + query_rules.list_rulesets: from: 1 - match: { count: 3 } @@ -171,7 +171,7 @@ teardown: --- "List Query Rulesets - with size": - do: - query_ruleset.list: + query_rules.list_rulesets: size: 2 - match: { count: 3 } @@ -188,26 +188,26 @@ teardown: --- "List Query Rulesets - empty": - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-1 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-2 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-3 - do: - query_ruleset.list: { } + query_rules.list_rulesets: { } - match: { count: 0 } --- "List Query Rulesets with multiple rules": - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: a-test-query-ruleset-with-lots-of-criteria body: rules: @@ -267,7 +267,7 @@ teardown: - 'id10' - do: - query_ruleset.list: + query_rules.list_rulesets: from: 0 size: 1 @@ -293,7 +293,7 @@ teardown: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - query_ruleset.list: { } + query_rules.list_rulesets: { } - match: { error.type: 'security_exception' } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml index 81e3e6c8411f7..91dc0581659a6 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/30_query_ruleset_delete.yml @@ -3,7 +3,7 @@ setup: cluster_features: [ "gte_v8.10.0" ] reason: Introduced in 8.10.0 - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset-to-delete body: rules: @@ -21,21 +21,21 @@ setup: --- "Delete Query Ruleset": - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-to-delete - match: { acknowledged: true } - do: catch: 
"missing" - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-query-ruleset-to-delete --- "Delete Query Ruleset - Ruleset does not exist": - do: catch: "missing" - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-nonexistent-query-ruleset --- @@ -46,7 +46,7 @@ setup: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-to-delete - match: { error.type: 'security_exception' } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml index 5cf0932f2fae2..078e24d86f1c8 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/40_rule_query_search.yml @@ -40,7 +40,7 @@ setup: - { "text": "observability" } - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-ruleset body: rules: @@ -84,7 +84,7 @@ setup: - 'doc7' - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: another-test-ruleset body: rules: @@ -101,17 +101,17 @@ setup: --- teardown: - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-ruleset ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: another-test-ruleset ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: combined-ruleset ignore: 404 @@ -314,7 +314,7 @@ teardown: "Perform a rule query over a ruleset with combined numeric and text rule matching": - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: combined-ruleset body: rules: @@ -426,7 +426,7 @@ teardown: wait_for: started - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: combined-ruleset body: rules: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml index a89cf7a24c2fa..fb3d7be9d2367 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/50_query_rule_put.yml @@ -7,17 +7,17 @@ setup: --- teardown: - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-ruleset ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-rule-recreating ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: forbidden-query-ruleset ignore: 404 @@ -25,7 +25,7 @@ teardown: --- 'Create query rule with nonexistant ruleset that is also created': - do: - query_rule.put: + query_rules.put_rule: ruleset_id: new-ruleset rule_id: query-rule-id body: @@ -43,7 +43,7 @@ teardown: - match: { result: 'created' } - do: - query_rule.get: + query_rules.get_rule: ruleset_id: new-ruleset rule_id: query-rule-id @@ -55,7 +55,7 @@ teardown: # Update the same rule in place - do: - query_rule.put: + query_rules.put_rule: ruleset_id: new-ruleset rule_id: query-rule-id body: @@ -72,7 +72,7 @@ teardown: - match: { result: 'updated' 
} - do: - query_rule.get: + query_rules.get_rule: ruleset_id: new-ruleset rule_id: query-rule-id @@ -86,7 +86,7 @@ teardown: 'Create query rule with existing ruleset respecting priority order': # Start with 2 rules, one that specifies priority and one that does not (should go at the end) - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-ruleset body: rules: @@ -115,7 +115,7 @@ teardown: - match: { result: 'created' } - do: - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-ruleset - match: { ruleset_id: test-ruleset } @@ -145,7 +145,7 @@ teardown: # Next, add a rule with a priority 2 - this should go in the middle - do: - query_rule.put: + query_rules.put_rule: ruleset_id: test-ruleset rule_id: query-rule-id3 body: @@ -162,7 +162,7 @@ teardown: - match: { result: 'created' } - do: - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-ruleset - match: { ruleset_id: test-ruleset } @@ -202,7 +202,7 @@ teardown: # Finally, add another single rule with no priority. This should be appended to the ruleset. - do: - query_rule.put: + query_rules.put_rule: ruleset_id: test-ruleset rule_id: query-rule-id4 body: @@ -218,7 +218,7 @@ teardown: - match: { result: 'created' } - do: - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-ruleset - match: { ruleset_id: test-ruleset } @@ -269,7 +269,7 @@ teardown: --- 'Create Query Rule - Resource already exists': - do: - query_rule.put: + query_rules.put_rule: ruleset_id: test-query-rule-recreating rule_id: abc body: @@ -286,7 +286,7 @@ teardown: - match: { result: 'created' } - do: - query_rule.put: + query_rules.put_rule: ruleset_id: test-query-rule-recreating rule_id: abc body: @@ -310,7 +310,7 @@ teardown: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - query_rule.put: + query_rules.put_rule: ruleset_id: forbidden-query-ruleset rule_id: abc body: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/5_query_rulesets_before_setup.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/5_query_rulesets_before_setup.yml index cbe4f98370300..e13c23b6d3c35 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/5_query_rulesets_before_setup.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/5_query_rulesets_before_setup.yml @@ -1,26 +1,26 @@ setup: - - requires: - cluster_features: ["gte_v8.10.0"] - reason: Introduced in 8.10.0 + - requires: + cluster_features: [ "gte_v8.10.0" ] + reason: Introduced in 8.10.0 --- "Get query ruleset returns a 404 when no query rulesets exist": - do: catch: /resource_not_found_exception/ - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-i-dont-exist --- "Delete query ruleset returns a 404 when no query rulesets exist": - do: catch: /resource_not_found_exception/ - query_ruleset.delete: - ruleset_id: test-i-dont-exist + query_rules.delete_ruleset: + ruleset_id: test-i-dont-exist --- "List query rulesets returns an empty list when no query rulesets exist": - do: - query_ruleset.list: { } + query_rules.list_rulesets: { } - match: { count: 0 } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/60_query_rule_delete.yml 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/60_query_rule_delete.yml index 63862ba666f41..033cab68e2bea 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/60_query_rule_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/rules/60_query_rule_delete.yml @@ -3,7 +3,7 @@ setup: cluster_features: [ "gte_v8.15.0" ] reason: Introduced in 8.15.0 - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset body: rules: @@ -28,7 +28,7 @@ setup: - 'id3' - 'id4' - do: - query_ruleset.put: + query_rules.put_ruleset: ruleset_id: test-query-ruleset-to-delete body: rules: @@ -55,19 +55,19 @@ setup: --- teardown: - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset ignore: 404 - do: - query_ruleset.delete: + query_rules.delete_ruleset: ruleset_id: test-query-ruleset-to-delete ignore: 404 --- "Delete Query Rule, ruleset still exists": - do: - query_rule.delete: + query_rules.delete_rule: ruleset_id: test-query-ruleset rule_id: query-rule-id1 @@ -75,12 +75,12 @@ teardown: - do: catch: "missing" - query_rule.get: + query_rules.get_rule: ruleset_id: test-query-ruleset rule_id: query-rule-id1 - do: - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-query-ruleset - match: { rules.0.rule_id: query-rule-id2 } @@ -88,7 +88,7 @@ teardown: --- "Delete Query Rule, ruleset is also deleted as it is now empty": - do: - query_rule.delete: + query_rules.delete_rule: ruleset_id: test-query-ruleset-to-delete rule_id: query-rule-id1 @@ -96,18 +96,18 @@ teardown: - do: catch: "missing" - query_rule.get: + query_rules.get_rule: ruleset_id: test-query-ruleset-to-delete rule_id: query-rule-id1 - do: - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-query-ruleset-to-delete - match: { rules.0.rule_id: query-rule-id2 } - do: - query_rule.delete: + query_rules.delete_rule: ruleset_id: test-query-ruleset-to-delete rule_id: query-rule-id2 @@ -115,20 +115,20 @@ teardown: - do: catch: "missing" - query_rule.get: + query_rules.get_rule: ruleset_id: test-query-ruleset-to-delete rule_id: query-rule-id2 - do: catch: "missing" - query_ruleset.get: + query_rules.get_ruleset: ruleset_id: test-query-ruleset-to-delete --- "Delete Query Rule - Rule does not exist": - do: catch: "missing" - query_rule.delete: + query_rules.delete_rule: ruleset_id: test-query-ruleset rule_id: nonexistent-rule @@ -136,7 +136,7 @@ teardown: "Delete Query Rule - Ruleset does not exist": - do: catch: "missing" - query_rule.delete: + query_rules.delete_rule: ruleset_id: nonexistent-query-ruleset rule_id: nonexistent-rule @@ -148,7 +148,7 @@ teardown: - do: catch: forbidden headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user - query_rule.delete: + query_rules.delete_rule: ruleset_id: test-query-ruleset rule_id: query-rule-id1 From 99ded38604303638b606dd4266206b510f2fe437 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 28 Jun 2024 07:33:45 -0500 Subject: [PATCH 038/216] Removing check for index existence from IndicesService::withTempIndexService (#110247) --- .../metadata/MetadataCreateIndexService.java | 2 ++ .../elasticsearch/indices/IndicesService.java | 3 -- .../indices/IndicesServiceTests.java | 31 ++++++++++++++----- 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 948b1dcb9b752..b5ee0ebd7e387 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -471,6 +471,8 @@ private ClusterState applyCreateIndexWithTemporaryService( final ActionListener rerouteListener ) throws Exception { // create the index here (on the master) to validate it can be created, as well as adding the mapping + assert indicesService.hasIndex(temporaryIndexMeta.getIndex()) == false + : Strings.format("Index [%s] already exists", temporaryIndexMeta.getIndex().getName()); return indicesService.withTempIndexService(temporaryIndexMeta, indexService -> { try { updateIndexMappingsAndBuildSortOrder(indexService, request, mappings, sourceMetadata); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 199bbc54fa3d6..0d81d24e64646 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -683,9 +683,6 @@ public T withTempIndexService( CheckedFunction indexServiceConsumer ) throws IOException, E { final Index index = indexMetadata.getIndex(); - if (hasIndex(index)) { - throw new ResourceAlreadyExistsException(index); - } List finalListeners = List.of( // double check that shard is not created. new IndexEventListener() { diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index d5359d4510436..088caa5ef6589 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.SlowLogFieldProvider; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; @@ -79,6 +80,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_UUID; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolverTests.indexBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -416,7 +418,7 @@ public void testVerifyIfIndexContentDeleted() throws Exception { final ClusterService clusterService = getInstanceFromNode(ClusterService.class); final Settings idxSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()) + .put(SETTING_INDEX_UUID, index.getUUID()) .build(); final IndexMetadata indexMetadata = new IndexMetadata.Builder(index.getName()).settings(idxSettings) .numberOfShards(1) @@ -454,7 +456,7 @@ public void testDanglingIndicesWithAliasConflict() throws Exception { final LocalAllocateDangledIndices dangling = getInstanceFromNode(LocalAllocateDangledIndices.class); final Settings 
idxSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .put(SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) .build(); final IndexMetadata indexMetadata = new IndexMetadata.Builder(alias).settings(idxSettings) .numberOfShards(1) @@ -485,7 +487,7 @@ public void testDanglingIndicesWithLaterVersion() throws Exception { final LocalAllocateDangledIndices dangling = getInstanceFromNode(LocalAllocateDangledIndices.class); final Settings idxSettingsLater = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.fromId(IndexVersion.current().id() + 10000)) - .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .put(SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) .build(); final IndexMetadata indexMetadataLater = new IndexMetadata.Builder(indexNameLater).settings(idxSettingsLater) .numberOfShards(1) @@ -513,7 +515,7 @@ public void testIndexAndTombstoneWithSameNameOnStartup() throws Exception { final IndicesService indicesService = getIndicesService(); final Settings idxSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()) + .put(SETTING_INDEX_UUID, index.getUUID()) .build(); final IndexMetadata indexMetadata = new IndexMetadata.Builder(index.getName()).settings(idxSettings) .numberOfShards(1) @@ -537,7 +539,7 @@ public void testStandAloneMapperServiceWithPlugins() throws IOException { final IndicesService indicesService = getIndicesService(); final Settings idxSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()) + .put(SETTING_INDEX_UUID, index.getUUID()) .put(IndexModule.SIMILARITY_SETTINGS_PREFIX + ".test.type", "fake-similarity") .build(); final IndexMetadata indexMetadata = new IndexMetadata.Builder(index.getName()).settings(idxSettings) @@ -615,7 +617,7 @@ public void testGetEngineFactory() throws IOException { final Index index = new Index(indexName, UUIDs.randomBase64UUID()); final Settings.Builder builder = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()); + .put(SETTING_INDEX_UUID, index.getUUID()); if (value != null) { builder.put(FooEnginePlugin.FOO_INDEX_SETTING.getKey(), value); } @@ -638,7 +640,7 @@ public void testConflictingEngineFactories() { final Index index = new Index(indexName, UUIDs.randomBase64UUID()); final Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()) + .put(SETTING_INDEX_UUID, index.getUUID()) .put(FooEnginePlugin.FOO_INDEX_SETTING.getKey(), true) .put(BarEnginePlugin.BAR_INDEX_SETTING.getKey(), true) .build(); @@ -828,4 +830,19 @@ public void testLoadSlowLogFieldProvider() { assertEquals(Map.of(), fieldProvider.searchSlowLogFields()); assertEquals(Map.of(), fieldProvider.indexSlowLogFields()); } + + public void testWithTempIndexServiceHandlesExistingIndex() throws Exception { + // This test makes sure that we can run withTempIndexService even if the index already exists + IndicesService indicesService = getIndicesService(); + IndexMetadata indexMetadata = new IndexMetadata.Builder("test").settings( + indexSettings(randomIntBetween(1, 5), randomIntBetween(0, 5)).put("index.version.created", IndexVersions.V_8_10_0) + 
.put(SETTING_INDEX_UUID, randomUUID()) + ).build(); + IndexService createdIndexService = indicesService.createIndex(indexMetadata, List.of(), true); + indicesService.withTempIndexService(indexMetadata, indexService -> { + assertNotEquals(createdIndexService, indexService); + assertEquals(createdIndexService.index(), indexService.index()); + return null; + }); + } } From 37af666508b763b3bebecc6ac8ecafedf17e3bd8 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 28 Jun 2024 14:51:51 +0200 Subject: [PATCH 039/216] Fix configuration cache incompatibility in ESQL build script (#110259) --- x-pack/plugin/esql/build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index da8cb397df035..efe274512c886 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -254,8 +254,8 @@ tasks.register("regen") { tasks.named("spotlessJava") { dependsOn stringTemplates } tasks.named('checkstyleMain').configure { excludes = [ "**/*.java.st" ] - exclude { it.file.toString().startsWith("${projectDir}/src/main/generated-src/generated") } - exclude { it.file.toString().startsWith("${projectDir}/src/main/generated") } + exclude { it.file.toString().contains("src/main/generated-src/generated") } + exclude { it.file.toString().contains("src/main/generated") } } def prop(Type, type, TYPE, BYTES, Array) { From f14c4a007e612890e42dc8c0e04cfdff65535808 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 28 Jun 2024 14:58:00 +0200 Subject: [PATCH 040/216] [Inference API] Remove validateParameters in InternalServiceSettings (#110230) --- .../CustomElandInternalServiceSettings.java | 18 +++++++++------ .../ElasticsearchInternalServiceSettings.java | 12 ++++------ ...lingualE5SmallInternalServiceSettings.java | 14 +++++++---- .../elser/ElserInternalServiceSettings.java | 23 ++++++++++++------- .../settings/InternalServiceSettings.java | 23 ------------------- 5 files changed, 41 insertions(+), 49 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index cc52419f3f2d7..6c81cc9948b70 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -14,11 +14,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredPositiveInteger; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; + public class CustomElandInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "custom_eland_model_internal_service_settings"; @@ -39,14 +41,16 @@ public CustomElandInternalServiceSettings(int numAllocations, int numThreads, St * @return The {@code CustomElandServiceSettings} builder */ public static CustomElandInternalServiceSettings fromMap(Map map) { - 
ValidationException validationException = new ValidationException(); - Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); - Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); - - validateParameters(numAllocations, validationException, numThreads); - String modelId = ServiceUtils.extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer numAllocations = extractRequiredPositiveInteger( + map, + NUM_ALLOCATIONS, + ModelConfigurations.SERVICE_SETTINGS, + validationException + ); + Integer numThreads = extractRequiredPositiveInteger(map, NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS, validationException); + String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); if (validationException.validationErrors().isEmpty() == false) { throw validationException; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index c2cf17fcc19f1..45d616074dded 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -12,14 +12,14 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.io.IOException; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredPositiveInteger; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; public class ElasticsearchInternalServiceSettings extends InternalServiceSettings { @@ -27,16 +27,14 @@ public class ElasticsearchInternalServiceSettings extends InternalServiceSetting private static final int FAILED_INT_PARSE_VALUE = -1; public static ElasticsearchInternalServiceSettings fromMap(Map map, ValidationException validationException) { - Integer numAllocations = extractOptionalPositiveInteger( + Integer numAllocations = extractRequiredPositiveInteger( map, NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS, validationException ); - Integer numThreads = extractOptionalPositiveInteger(map, NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS, validationException); - validateParameters(numAllocations, validationException, numThreads); - - String modelId = ServiceUtils.extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer numThreads = extractRequiredPositiveInteger(map, NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS, validationException); + String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); // if an error occurred while parsing, we'll set these to an invalid value so we don't accidentally get a // null pointer when doing unboxing diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index d514ca6a917d4..602f3a5c6c4e8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; @@ -20,6 +21,8 @@ import java.util.Arrays; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredPositiveInteger; + public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "multilingual_e5_small_service_settings"; @@ -56,10 +59,13 @@ public static MultilingualE5SmallInternalServiceSettings.Builder fromMap(Map map, ValidationException validationException) { - Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); - Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); - - validateParameters(numAllocations, validationException, numThreads); + Integer numAllocations = extractRequiredPositiveInteger( + map, + NUM_ALLOCATIONS, + ModelConfigurations.SERVICE_SETTINGS, + validationException + ); + Integer numThreads = extractRequiredPositiveInteger(map, NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS, validationException); String modelId = ServiceUtils.removeAsType(map, MODEL_ID, String.class); if (modelId != null) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java index 21fef51eed077..603c218d4dd21 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java @@ -12,13 +12,16 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.io.IOException; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredPositiveInteger; + public class ElserInternalServiceSettings extends InternalServiceSettings { public static final String NAME = "elser_mlnode_service_settings"; 
@@ -34,14 +37,18 @@ public class ElserInternalServiceSettings extends InternalServiceSettings { */ public static ElserInternalServiceSettings.Builder fromMap(Map map) { ValidationException validationException = new ValidationException(); - Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); - Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); - validateParameters(numAllocations, validationException, numThreads); + Integer numAllocations = extractRequiredPositiveInteger( + map, + NUM_ALLOCATIONS, + ModelConfigurations.SERVICE_SETTINGS, + validationException + ); + Integer numThreads = extractRequiredPositiveInteger(map, NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS, validationException); + String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); - String model_id = ServiceUtils.removeAsType(map, MODEL_ID, String.class); - if (model_id != null && ElserInternalService.VALID_ELSER_MODEL_IDS.contains(model_id) == false) { - validationException.addValidationError("unknown ELSER model id [" + model_id + "]"); + if (modelId != null && ElserInternalService.VALID_ELSER_MODEL_IDS.contains(modelId) == false) { + validationException.addValidationError("unknown ELSER model id [" + modelId + "]"); } if (validationException.validationErrors().isEmpty() == false) { @@ -56,7 +63,7 @@ public ElserInternalServiceSettings build() { }; builder.setNumAllocations(numAllocations); builder.setNumThreads(numThreads); - builder.setModelId(model_id); + builder.setModelId(modelId); return builder; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java index ad9ccd17a95c4..00bb48ae2302a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java @@ -7,13 +7,10 @@ package org.elasticsearch.xpack.inference.services.settings; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.Objects; @@ -34,26 +31,6 @@ public InternalServiceSettings(int numAllocations, int numThreads, String modelI this.modelId = modelId; } - protected static void validateParameters(Integer numAllocations, ValidationException validationException, Integer numThreads) { - if (numAllocations == null) { - validationException.addValidationError( - ServiceUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) - ); - } else if (numAllocations < 1) { - validationException.addValidationError( - ServiceUtils.mustBeAPositiveIntegerErrorMessage(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS, numAllocations) - ); - } - - if (numThreads == null) { - validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS)); - } else if (numThreads < 1) { - 
validationException.addValidationError( - ServiceUtils.mustBeAPositiveIntegerErrorMessage(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS, numThreads) - ); - } - } - public int getNumAllocations() { return numAllocations; } From 8eb0745d3d67bb348e75f884327d75a3148048bf Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 28 Jun 2024 15:32:27 +0200 Subject: [PATCH 041/216] [Gradle] Fix build scan configuration Cache incompatibility (#110260) --- .../src/main/groovy/elasticsearch.build-scan.gradle | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index c6930c2263ec3..7cba4730e88da 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -26,13 +26,10 @@ develocity { if (jenkinsUrl?.host?.endsWith('elastic.co') || jenkinsUrl?.host?.endsWith('elastic.dev') || System.getenv('BUILDKITE') == 'true') { publishing.onlyIf { true } server = 'https://gradle-enterprise.elastic.co' - } else { - publishing.onlyIf { - server.isPresent(); - } + } else if( server.isPresent() == false) { + publishing.onlyIf { false } } - background { tag OS.current().name() tag Architecture.current().name() From 9938c4adfdcd280f1baaf2810048a46e95956e70 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 28 Jun 2024 15:51:27 +0200 Subject: [PATCH 042/216] [Inference API] Move Google Vertex AI task settings tests to correct package (#110262) --- .../GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java | 4 +--- .../GoogleVertexAiEmbeddingsTaskSettingsTests.java | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/{external/response/googlevertexai => services/googlevertexai/embeddings}/GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java (82%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/{external/response/googlevertexai => services/googlevertexai/embeddings}/GoogleVertexAiEmbeddingsTaskSettingsTests.java (94%) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java similarity index 82% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java index 87edbddb257a0..1e9a2f435cb08 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRequestTaskSettingsTests.java @@ -5,11 +5,9 @@ * 2.0. 
 */
 
-package org.elasticsearch.xpack.inference.external.response.googlevertexai;
+package org.elasticsearch.xpack.inference.services.googlevertexai.embeddings;
 
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsRequestTaskSettings;
-import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsTaskSettings;
 
 import java.util.HashMap;
 import java.util.Map;
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java
similarity index 94%
rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiEmbeddingsTaskSettingsTests.java
rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java
index 23e4e836ff510..364d8090786df 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiEmbeddingsTaskSettingsTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java
@@ -5,7 +5,7 @@
  * 2.0.
  */
 
-package org.elasticsearch.xpack.inference.external.response.googlevertexai;
+package org.elasticsearch.xpack.inference.services.googlevertexai.embeddings;
 
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -16,8 +16,6 @@
 import org.elasticsearch.xcontent.XContentFactory;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase;
-import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsRequestTaskSettings;
-import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsTaskSettings;
 
 import java.io.IOException;
 import java.util.HashMap;

From 405e39660b0f86d12ea71b02288f735a2b7cb751 Mon Sep 17 00:00:00 2001
From: Mayya Sharipova
Date: Fri, 28 Jun 2024 09:59:28 -0400
Subject: [PATCH 043/216] Support k parameter for knn query (#110233)

Introduce an optional `k` param for the knn query.

If `k` is not set, the knn query keeps the previous behaviour:
- `num_candidates` docs are collected from each shard. These `num_candidates`
  docs are used for combining with results from other queries and
  aggregations on each shard.
- docs from all shards are merged to produce the top global `size` results.

If `k` is set, the behaviour is instead the following:
- `k` docs are collected from each shard. These `k` docs are used for
  combining with results from other queries and aggregations on each shard.
- similarly, docs from all shards are merged to produce the top global
  `size` results.

Having a `k` param makes it more intuitive for users to express what they
need. They also don't need to care about, and can skip, the `num_candidates`
param for this query, as it is more of an internal detail that tunes how the
knn search operates.
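A rough sketch of what this means for callers of the Java API, using the
five-argument `KnnVectorQueryBuilder` constructor introduced by this change
(the field name, vector values, and counts below are made up for
illustration):

```java
import org.elasticsearch.search.vectors.KnnVectorQueryBuilder;

// Sketch only: "image-vector", the query vector, and the counts are illustrative.
class KnnKParamSketch {

    // k == null: previous behaviour, each shard combines num_candidates (here 100)
    // docs with other queries and aggregations before the global merge.
    static KnnVectorQueryBuilder withoutK(float[] queryVector) {
        return new KnnVectorQueryBuilder("image-vector", queryVector, null, 100, null);
    }

    // k == 10: each shard contributes only its top 10 docs, while num_candidates
    // remains an internal knob that tunes the underlying vector search.
    static KnnVectorQueryBuilder withK(float[] queryVector) {
        return new KnnVectorQueryBuilder("image-vector", queryVector, 10, 100, null);
    }
}
```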
Closes #108473 --- docs/changelog/110233.yaml | 6 + docs/reference/query-dsl/knn-query.asciidoc | 37 +-- docs/reference/rest-api/common-parms.asciidoc | 2 +- .../percolator/PercolatorQuerySearchIT.java | 2 +- .../140_knn_query_with_other_queries.yml | 9 - .../190_knn_query-with-k-param.yml | 262 ++++++++++++++++++ server/src/main/java/module-info.java | 1 + .../org/elasticsearch/TransportVersions.java | 1 + .../vectors/DenseVectorFieldMapper.java | 33 ++- .../elasticsearch/search/SearchFeatures.java | 22 ++ ...iversifyingChildrenByteKnnVectorQuery.java | 15 +- ...versifyingChildrenFloatKnnVectorQuery.java | 15 +- .../search/vectors/ESKnnByteVectorQuery.java | 10 +- .../search/vectors/ESKnnFloatVectorQuery.java | 9 +- .../search/vectors/KnnSearchBuilder.java | 2 +- .../vectors/KnnSearchRequestParser.java | 2 +- .../search/vectors/KnnVectorQueryBuilder.java | 78 ++++-- ...lasticsearch.features.FeatureSpecification | 1 + .../search/KnnSearchSingleNodeTests.java | 2 +- .../vectors/DenseVectorFieldMapperTests.java | 20 +- .../vectors/DenseVectorFieldTypeTests.java | 22 +- .../index/query/NestedQueryBuilderTests.java | 1 + ...AbstractKnnVectorQueryBuilderTestCase.java | 66 ++++- .../KnnByteVectorQueryBuilderTests.java | 4 +- .../KnnFloatVectorQueryBuilderTests.java | 4 +- .../search/vectors/KnnSearchBuilderTests.java | 3 +- .../mapper/SemanticTextFieldMapper.java | 2 +- .../DocumentLevelSecurityTests.java | 2 +- .../integration/FieldLevelSecurityTests.java | 6 +- 29 files changed, 523 insertions(+), 116 deletions(-) create mode 100644 docs/changelog/110233.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml create mode 100644 server/src/main/java/org/elasticsearch/search/SearchFeatures.java diff --git a/docs/changelog/110233.yaml b/docs/changelog/110233.yaml new file mode 100644 index 0000000000000..d9ce4057090a4 --- /dev/null +++ b/docs/changelog/110233.yaml @@ -0,0 +1,6 @@ +pr: 110233 +summary: Support k parameter for knn query +area: Vector Search +type: enhancement +issues: + - 108473 diff --git a/docs/reference/query-dsl/knn-query.asciidoc b/docs/reference/query-dsl/knn-query.asciidoc index b7ded6929ed21..05a00b9949912 100644 --- a/docs/reference/query-dsl/knn-query.asciidoc +++ b/docs/reference/query-dsl/knn-query.asciidoc @@ -50,7 +50,8 @@ POST my-image-index/_bulk?refresh=true ---- //TEST[continued] -. Run the search using the `knn` query, asking for the top 3 nearest vectors. +. Run the search using the `knn` query, asking for the top 10 nearest vectors +from each shard, and then combine shard results to get the top 3 global results. + [source,console] ---- @@ -61,18 +62,13 @@ POST my-image-index/_search "knn": { "field": "image-vector", "query_vector": [-5, 9, -12], - "num_candidates": 10 + "k": 10 } } } ---- //TEST[continued] -NOTE: `knn` query doesn't have a separate `k` parameter. `k` is defined by -`size` parameter of a search request similar to other queries. `knn` query -collects `num_candidates` results from each shard, then merges them to get -the top `size` results. - [[knn-query-top-level-parameters]] ==== Top-level parameters for `knn` @@ -99,14 +95,21 @@ Either this or `query_vector_builder` must be provided. include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-query-vector-builder] -- +`k`:: ++ +-- +(Optional, integer) The number of nearest neighbors to return from each shard. +{es} collects `k` results from each shard, then merges them to find the global top results. 
+This value must be less than or equal to `num_candidates`. Defaults to `num_candidates`. +-- `num_candidates`:: + -- -(Optional, integer) The number of nearest neighbor candidates to consider per shard. -Cannot exceed 10,000. {es} collects `num_candidates` results from each shard, then -merges them to find the top results. Increasing `num_candidates` tends to improve the -accuracy of the final results. Defaults to `Math.min(1.5 * size, 10_000)`. +(Optional, integer) The number of nearest neighbor candidates to consider per shard +while doing knn search. Cannot exceed 10,000. Increasing `num_candidates` tends to +improve the accuracy of the final results. +Defaults to `1.5 * k` if `k` is set, or `1.5 * size` if `k` is not set. -- `filter`:: @@ -160,7 +163,7 @@ Also filters from <> are applied as pre-filters. All other filters found in the Query DSL tree are applied as post-filters. For example, `knn` query finds the top 3 documents with the nearest vectors -(num_candidates=3), which are combined with `term` filter, that is +(k=3), which are combined with `term` filter, that is post-filtered. The final set of documents will contain only a single document that passes the post-filter. @@ -176,7 +179,7 @@ POST my-image-index/_search "knn": { "field": "image-vector", "query_vector": [-5, 9, -12], - "num_candidates": 3 + "k": 3 } }, "filter" : { @@ -217,7 +220,7 @@ POST my-image-index/_search "knn": { "field": "image-vector", "query_vector": [-5, 9, -12], - "num_candidates": 10, + "k": 10, "boost": 2 } } @@ -267,8 +270,8 @@ A sample query can look like below: [[knn-query-aggregations]] ==== Knn query with aggregations -`knn` query calculates aggregations on `num_candidates` from each shard. +`knn` query calculates aggregations on top `k` documents from each shard. Thus, the final results from aggregations contain -`num_candidates * number_of_shards` documents. This is different from +`k * number_of_shards` documents. This is different from the <> where aggregations are -calculated on the global top k nearest documents. +calculated on the global top `k` nearest documents. diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index e537fc959965a..7c2e42a26b923 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -594,7 +594,7 @@ end::knn-filter[] tag::knn-k[] Number of nearest neighbors to return as top hits. This value must be less than -`num_candidates`. Defaults to `size`. +or equal to `num_candidates`. Defaults to `size`. 
end::knn-k[] tag::knn-num-candidates[] diff --git a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 88a39fe4aebc8..0fe7de9bbb23c 100644 --- a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -1358,7 +1358,7 @@ public void testKnnQueryNotSupportedInPercolator() throws IOException { """); indicesAdmin().prepareCreate("index1").setMapping(mappings).get(); ensureGreen(); - QueryBuilder knnVectorQueryBuilder = new KnnVectorQueryBuilder("my_vector", new float[] { 1, 1, 1, 1, 1 }, 10, null); + QueryBuilder knnVectorQueryBuilder = new KnnVectorQueryBuilder("my_vector", new float[] { 1, 1, 1, 1, 1 }, 10, 10, null); IndexRequestBuilder indexRequestBuilder = prepareIndex("index1").setId("knn_query1") .setSource(jsonBuilder().startObject().field("my_query", knnVectorQueryBuilder).endObject()); diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml index 28ecd8ef59c02..d52a5daf22344 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml @@ -26,15 +26,6 @@ setup: my_name: type: keyword store: true - aliases: - my_alias: - filter: - term: - my_name: v2 - my_alias1: - filter: - term: - my_name: v1 - do: bulk: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml new file mode 100644 index 0000000000000..f6538b573809a --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/190_knn_query-with-k-param.yml @@ -0,0 +1,262 @@ +# test how knn query interact with other queries +setup: + - requires: + cluster_features: "search.vectors.k_param_supported" + reason: 'k param for knn as query is required' + test_runner_features: close_to + + - do: + indices.create: + index: my_index + body: + settings: + number_of_shards: 1 + mappings: + dynamic: false + properties: + my_vector: + type: dense_vector + dims: 4 + index : true + similarity : l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 + my_name: + type: keyword + store: true + + - do: + bulk: + refresh: true + index: my_index + body: + - '{"index": {"_id": "1"}}' + - '{"my_vector": [1, 1, 1, 1], "my_name": "v1"}' + - '{"index": {"_id": "2"}}' + - '{"my_vector": [1, 1, 1, 2], "my_name": "v2"}' + - '{"index": {"_id": "3"}}' + - '{"my_vector": [1, 1, 1, 3], "my_name": "v1"}' + - '{"index": {"_id": "4"}}' + - '{"my_vector": [1, 1, 1, 4], "my_name": "v2"}' + - '{"index": {"_id": "5"}}' + - '{"my_vector": [1, 1, 1, 5], "my_name": "v1"}' + - '{"index": {"_id": "6"}}' + - '{"my_vector": [1, 1, 1, 6], "my_name": "v2"}' + - '{"index": {"_id": "7"}}' + - '{"my_vector": [1, 1, 1, 7], "my_name": "v1"}' + - '{"index": {"_id": "8"}}' + - '{"my_vector": [1, 1, 1, 8], "my_name": "v2"}' + - '{"index": {"_id": "9"}}' + - '{"my_vector": [1, 1, 1, 9], 
"my_name": "v1"}' + - '{"index": {"_id": "10"}}' + - '{"my_vector": [1, 1, 1, 10], "my_name": "v2"}' + +--- +"Simple knn query with k param": + - do: + search: + index: my_index + body: + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + k: 5 + + - match: { hits.total.value: 5 } # collector sees k docs + - length: {hits.hits: 5} # k docs retrieved + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.3._id: "4" } + - match: { hits.hits.4._id: "5" } + + - do: + search: + index: my_index + body: + size: 3 + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + k: 5 + + - match: { hits.total.value: 5 } # collector sees k docs + - length: { hits.hits: 3 } # size docs retrieved + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + search: + index: my_index + body: + size: 3 + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + k: 5 + num_candidates: 10 + + - match: { hits.total.value: 5 } # collector sees k docs + - length: { hits.hits: 3 } # size docs retrieved + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + +--- +"Knn query within the standard retriever": + - do: + search: + index: my_index + body: + retriever: + standard: + filter: + bool: + must: + term: + my_name: "v1" + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + k: 10 + - match: { hits.total.value: 5 } # docs that pass post-filter + - length: { hits.hits: 5 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.2._id: "5" } + - match: { hits.hits.3._id: "7" } + - match: { hits.hits.4._id: "9" } + +--- +"Incorrect k param": + - do: + catch: bad_request + search: + index: my_index + body: + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + k: 5 + num_candidates: 3 + - match: { status: 400 } + - match: { error.type: "x_content_parse_exception" } + - match: { error.caused_by.type: "illegal_argument_exception" } + - match: { error.caused_by.reason: "[num_candidates] cannot be less than [k]" } + + - do: + catch: bad_request + search: + index: my_index + body: + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + k: 0 + - match: { status: 400 } + - match: { error.type: "x_content_parse_exception" } + - match: { error.caused_by.type: "illegal_argument_exception" } + - match: { error.caused_by.reason: "[k] must be greater than 0" } + +--- +"Function score query with knn query with k param": + # find top 5 knn docs, then boost docs with name v1 by 10 and docs with name v2 by 100 + - do: + search: + index: my_index + body: + size: 3 + fields: [ my_name ] + query: + function_score: + query: + knn: + field: my_vector + query_vector: [ 1, 1, 1, 1 ] + k : 5 + functions: + - filter: { match: { my_name: v1 } } + weight: 10 + - filter: { match: { my_name: v2 } } + weight: 100 + boost_mode: multiply + + - match: { hits.total.value: 5 } # collector sees k docs + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - close_to: { hits.hits.0._score: { value: 50.0, error: 0.001 } } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1.fields.my_name.0: v1 } + - close_to: { hits.hits.1._score: { value: 10.0, error: 0.001 } } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2.fields.my_name.0: v2 } + - close_to: { hits.hits.2._score: { value: 10.0, 
error: 0.001 } } + +--- +"dis_max query with knn query": + - do: + search: + index: my_index + body: + size: 10 + fields: [ my_name ] + query: + dis_max: + queries: + - knn: { field: my_vector, query_vector: [ 1, 1, 1, 1 ], k: 5, num_candidates: 10 } + - match: { my_name: v2 } + tie_breaker: 0.8 + + - match: { hits.total.value: 8 } # 5 knn results + extra results from match query + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.0.fields.my_name.0: v2 } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.1.fields.my_name.0: v1 } + - match: { hits.hits.2._id: "4" } + - match: { hits.hits.2.fields.my_name.0: v2 } + - match: { hits.hits.3._id: "6" } + - match: { hits.hits.3.fields.my_name.0: v2 } + - match: { hits.hits.4._id: "8" } + - match: { hits.hits.4.fields.my_name.0: v2 } + - match: { hits.hits.5._id: "10" } + - match: { hits.hits.5.fields.my_name.0: v2 } + - match: { hits.hits.6._id: "3" } + - match: { hits.hits.6.fields.my_name.0: v1 } + - match: { hits.hits.7._id: "5" } + - match: { hits.hits.7.fields.my_name.0: v1 } + +--- +"Aggregations with collected number of docs depends on k param": + - do: + search: + index: my_index + body: + size: 2 + query: + knn: + field: my_vector + query_vector: [1, 1, 1, 1] + k: 5 # collect 5 results from each shard + aggs: + my_agg: + terms: + field: my_name + order: + _key: asc + + - length: {hits.hits: 2} + - match: {hits.total.value: 5} + - match: {aggregations.my_agg.buckets.0.key: 'v1'} + - match: {aggregations.my_agg.buckets.1.key: 'v2'} + - match: {aggregations.my_agg.buckets.0.doc_count: 3} + - match: {aggregations.my_agg.buckets.1.doc_count: 2} diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index e2810a6f5bf16..aaf8b3d0c8d84 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -431,6 +431,7 @@ org.elasticsearch.indices.IndicesFeatures, org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures, org.elasticsearch.index.mapper.MapperFeatures, + org.elasticsearch.search.SearchFeatures, org.elasticsearch.script.ScriptFeatures, org.elasticsearch.search.retriever.RetrieversFeatures, org.elasticsearch.reservedstate.service.FileSettingsFeatures; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 0a75ccfbbedf3..9c9d16032ba95 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -204,6 +204,7 @@ static TransportVersion def(int id) { public static final TransportVersion EVENT_INGESTED_RANGE_IN_CLUSTER_STATE = def(8_695_00_0); public static final TransportVersion ESQL_ADD_AGGREGATE_TYPE = def(8_696_00_0); public static final TransportVersion SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED = def(8_697_00_0); + public static final TransportVersion K_FOR_KNN_QUERY_ADDED = def(8_698_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 3a50fe6f28a6a..7d2fc9230a72f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -1749,12 +1749,20 @@ && isNotUnitVector(squaredMagnitude)) { return new DenseVectorQuery.Floats(queryVector, name()); } - Query createKnnQuery(float[] queryVector, int numCands, Query filter, Float similarityThreshold, BitSetProducer parentFilter) { - return createKnnQuery(VectorData.fromFloats(queryVector), numCands, filter, similarityThreshold, parentFilter); + Query createKnnQuery( + float[] queryVector, + Integer k, + int numCands, + Query filter, + Float similarityThreshold, + BitSetProducer parentFilter + ) { + return createKnnQuery(VectorData.fromFloats(queryVector), k, numCands, filter, similarityThreshold, parentFilter); } public Query createKnnQuery( VectorData queryVector, + Integer k, int numCands, Query filter, Float similarityThreshold, @@ -1766,14 +1774,15 @@ public Query createKnnQuery( ); } return switch (getElementType()) { - case BYTE -> createKnnByteQuery(queryVector.asByteVector(), numCands, filter, similarityThreshold, parentFilter); - case FLOAT -> createKnnFloatQuery(queryVector.asFloatVector(), numCands, filter, similarityThreshold, parentFilter); - case BIT -> createKnnBitQuery(queryVector.asByteVector(), numCands, filter, similarityThreshold, parentFilter); + case BYTE -> createKnnByteQuery(queryVector.asByteVector(), k, numCands, filter, similarityThreshold, parentFilter); + case FLOAT -> createKnnFloatQuery(queryVector.asFloatVector(), k, numCands, filter, similarityThreshold, parentFilter); + case BIT -> createKnnBitQuery(queryVector.asByteVector(), k, numCands, filter, similarityThreshold, parentFilter); }; } private Query createKnnBitQuery( byte[] queryVector, + Integer k, int numCands, Query filter, Float similarityThreshold, @@ -1781,8 +1790,8 @@ private Query createKnnBitQuery( ) { elementType.checkDimensions(dims, queryVector.length); Query knnQuery = parentFilter != null - ? new ESDiversifyingChildrenByteKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) - : new ESKnnByteVectorQuery(name(), queryVector, numCands, filter); + ? new ESDiversifyingChildrenByteKnnVectorQuery(name(), queryVector, filter, k, numCands, parentFilter) + : new ESKnnByteVectorQuery(name(), queryVector, k, numCands, filter); if (similarityThreshold != null) { knnQuery = new VectorSimilarityQuery( knnQuery, @@ -1795,6 +1804,7 @@ private Query createKnnBitQuery( private Query createKnnByteQuery( byte[] queryVector, + Integer k, int numCands, Query filter, Float similarityThreshold, @@ -1807,8 +1817,8 @@ private Query createKnnByteQuery( elementType.checkVectorMagnitude(similarity, ElementType.errorByteElementsAppender(queryVector), squaredMagnitude); } Query knnQuery = parentFilter != null - ? new ESDiversifyingChildrenByteKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) - : new ESKnnByteVectorQuery(name(), queryVector, numCands, filter); + ? 
new ESDiversifyingChildrenByteKnnVectorQuery(name(), queryVector, filter, k, numCands, parentFilter)
+            : new ESKnnByteVectorQuery(name(), queryVector, k, numCands, filter);
         if (similarityThreshold != null) {
             knnQuery = new VectorSimilarityQuery(
                 knnQuery,
@@ -1821,6 +1831,7 @@ private Query createKnnFloatQuery(
         float[] queryVector,
+        Integer k,
         int numCands,
         Query filter,
         Float similarityThreshold,
@@ -1842,8 +1853,8 @@ && isNotUnitVector(squaredMagnitude)) {
             }
         }
         Query knnQuery = parentFilter != null
-            ? new ESDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter)
-            : new ESKnnFloatVectorQuery(name(), queryVector, numCands, filter);
+            ? new ESDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, k, numCands, parentFilter)
+            : new ESKnnFloatVectorQuery(name(), queryVector, k, numCands, filter);
         if (similarityThreshold != null) {
             knnQuery = new VectorSimilarityQuery(
                 knnQuery,
diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java
new file mode 100644
index 0000000000000..9d6abda593272
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search;
+
+import org.elasticsearch.features.FeatureSpecification;
+import org.elasticsearch.features.NodeFeature;
+import org.elasticsearch.search.vectors.KnnVectorQueryBuilder;
+
+import java.util.Set;
+
+public final class SearchFeatures implements FeatureSpecification {
+    @Override
+    public Set<NodeFeature> getFeatures() {
+        return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED);
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java
index f5f3ac8e8fe24..bf250a2f35184 100644
--- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java
+++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java
@@ -15,15 +15,24 @@
 import org.elasticsearch.search.profile.query.QueryProfiler;
 
 public class ESDiversifyingChildrenByteKnnVectorQuery extends DiversifyingChildrenByteKnnVectorQuery implements ProfilingQuery {
+    private final Integer kParam;
     private long vectorOpsCount;
 
-    public ESDiversifyingChildrenByteKnnVectorQuery(String field, byte[] query, Query childFilter, int k, BitSetProducer parentsFilter) {
-        super(field, query, childFilter, k, parentsFilter);
+    public ESDiversifyingChildrenByteKnnVectorQuery(
+        String field,
+        byte[] query,
+        Query childFilter,
+        Integer k,
+        int numCands,
+        BitSetProducer parentsFilter
+    ) {
+        super(field, query, childFilter, numCands, parentsFilter);
+        this.kParam = k;
     }
 
     @Override
     protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) {
-        TopDocs topK = super.mergeLeafResults(perLeafResults);
+        TopDocs topK = kParam == null ?
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); vectorOpsCount = topK.totalHits.value; return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java index 1ecb7e5afd044..59b8f26902367 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java @@ -15,15 +15,24 @@ import org.elasticsearch.search.profile.query.QueryProfiler; public class ESDiversifyingChildrenFloatKnnVectorQuery extends DiversifyingChildrenFloatKnnVectorQuery implements ProfilingQuery { + private final Integer kParam; private long vectorOpsCount; - public ESDiversifyingChildrenFloatKnnVectorQuery(String field, float[] query, Query childFilter, int k, BitSetProducer parentsFilter) { - super(field, query, childFilter, k, parentsFilter); + public ESDiversifyingChildrenFloatKnnVectorQuery( + String field, + float[] query, + Query childFilter, + Integer k, + int numCands, + BitSetProducer parentsFilter + ) { + super(field, query, childFilter, numCands, parentsFilter); + this.kParam = k; } @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { - TopDocs topK = super.mergeLeafResults(perLeafResults); + TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); vectorOpsCount = topK.totalHits.value; return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 05cf52fd23f24..9808d97ec8253 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -14,16 +14,18 @@ import org.elasticsearch.search.profile.query.QueryProfiler; public class ESKnnByteVectorQuery extends KnnByteVectorQuery implements ProfilingQuery { - + private final Integer kParam; private long vectorOpsCount; - public ESKnnByteVectorQuery(String field, byte[] target, int k, Query filter) { - super(field, target, k, filter); + public ESKnnByteVectorQuery(String field, byte[] target, Integer k, int numCands, Query filter) { + super(field, target, numCands, filter); + this.kParam = k; } @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { - TopDocs topK = super.mergeLeafResults(perLeafResults); + // if k param is set, we get only top k results from each shard + TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); vectorOpsCount = topK.totalHits.value; return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index e83a90a3c4df8..aad4005eb83ed 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -14,15 +14,18 @@ import org.elasticsearch.search.profile.query.QueryProfiler; public class ESKnnFloatVectorQuery extends KnnFloatVectorQuery implements ProfilingQuery { + private final Integer kParam; private long vectorOpsCount; - public ESKnnFloatVectorQuery(String field, float[] target, int k, Query filter) { - super(field, target, k, filter); + public ESKnnFloatVectorQuery(String field, float[] target, Integer k, int numCands, Query filter) { + super(field, target, numCands, filter); + this.kParam = k; } @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { - TopDocs topK = super.mergeLeafResults(perLeafResults); + // if k param is set, we get only top k results from each shard + TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); vectorOpsCount = topK.totalHits.value; return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java index 3c03d3258ebab..601c55293418d 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchBuilder.java @@ -398,7 +398,7 @@ public KnnVectorQueryBuilder toQueryBuilder() { if (queryVectorBuilder != null) { throw new IllegalArgumentException("missing rewrite"); } - return new KnnVectorQueryBuilder(field, queryVector, numCands, similarity).boost(boost) + return new KnnVectorQueryBuilder(field, queryVector, null, numCands, similarity).boost(boost) .queryName(queryName) .addFilterQueries(filterQueries); } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchRequestParser.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchRequestParser.java index 8e5c24d0398b9..237bb7e832c3e 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchRequestParser.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnSearchRequestParser.java @@ -255,7 +255,7 @@ public KnnVectorQueryBuilder toQueryBuilder() { if (numCands > NUM_CANDS_LIMIT) { throw new IllegalArgumentException("[" + NUM_CANDS_FIELD.getPreferredName() + "] cannot exceed [" + NUM_CANDS_LIMIT + "]"); } - return new KnnVectorQueryBuilder(field, queryVector, numCands, null); + return new KnnVectorQueryBuilder(field, queryVector, null, numCands, null); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index 0c8dfc9a98330..0f64859e877f4 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.core.Nullable; +import 
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NestedObjectMapper;
 import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
@@ -52,11 +53,14 @@
  * {@link org.apache.lucene.search.KnnByteVectorQuery}.
  */
 public class KnnVectorQueryBuilder extends AbstractQueryBuilder<KnnVectorQueryBuilder> {
+    public static final NodeFeature K_PARAM_SUPPORTED = new NodeFeature("search.vectors.k_param_supported");
+
     public static final String NAME = "knn";
     private static final int NUM_CANDS_LIMIT = 10_000;
     private static final float NUM_CANDS_MULTIPLICATIVE_FACTOR = 1.5f;
 
     public static final ParseField FIELD_FIELD = new ParseField("field");
+    public static final ParseField K_FIELD = new ParseField("k");
     public static final ParseField NUM_CANDS_FIELD = new ParseField("num_candidates");
     public static final ParseField QUERY_VECTOR_FIELD = new ParseField("query_vector");
     public static final ParseField VECTOR_SIMILARITY_FIELD = new ParseField("similarity");
@@ -69,10 +73,11 @@ public class KnnVectorQueryBuilder extends AbstractQueryBuilder<KnnVectorQueryB
         NAME,
         args -> new KnnVectorQueryBuilder(
             (String) args[0],
             (VectorData) args[1],
-            (QueryVectorBuilder) args[4],
+            (QueryVectorBuilder) args[5],
             null,
             (Integer) args[2],
-            (Float) args[3]
+            (Integer) args[3],
+            (Float) args[4]
         )
     );
@@ -84,6 +89,7 @@ public class KnnVectorQueryBuilder extends AbstractQueryBuilder<KnnVectorQueryB
     private final String fieldName;
     private final VectorData queryVector;
+    private final Integer k;
     private final Integer numCands;
     private final List<QueryBuilder> filterQueries = new ArrayList<>();
     private final Float vectorSimilarity;
     private final QueryVectorBuilder queryVectorBuilder;
     private final Supplier<float[]> queryVectorSupplier;
 
-    public KnnVectorQueryBuilder(String fieldName, float[] queryVector, Integer numCands, Float vectorSimilarity) {
-        this(fieldName, VectorData.fromFloats(queryVector), null, null, numCands, vectorSimilarity);
+    public KnnVectorQueryBuilder(String fieldName, float[] queryVector, Integer k, Integer numCands, Float vectorSimilarity) {
+        this(fieldName, VectorData.fromFloats(queryVector), null, null, k, numCands, vectorSimilarity);
     }
 
-    protected KnnVectorQueryBuilder(String fieldName, QueryVectorBuilder queryVectorBuilder, Integer numCands, Float vectorSimilarity) {
-        this(fieldName, null, queryVectorBuilder, null, numCands, vectorSimilarity);
+    protected KnnVectorQueryBuilder(
+        String fieldName,
+        QueryVectorBuilder queryVectorBuilder,
+        Integer k,
+        Integer numCands,
+        Float vectorSimilarity
+    ) {
+        this(fieldName, null, queryVectorBuilder, null, k, numCands, vectorSimilarity);
     }
 
-    public KnnVectorQueryBuilder(String fieldName, byte[] queryVector, Integer numCands, Float vectorSimilarity) {
-        this(fieldName, VectorData.fromBytes(queryVector), null, null, numCands, vectorSimilarity);
+    public KnnVectorQueryBuilder(String fieldName, byte[] queryVector, Integer k, Integer numCands, Float vectorSimilarity) {
+        this(fieldName, VectorData.fromBytes(queryVector), null, null, k, numCands, vectorSimilarity);
     }
 
-    public KnnVectorQueryBuilder(String fieldName, VectorData queryVector, Integer numCands, Float vectorSimilarity) {
-        this(fieldName, queryVector, null, null, numCands, vectorSimilarity);
+    public KnnVectorQueryBuilder(String fieldName, VectorData queryVector, Integer k, Integer numCands, Float vectorSimilarity) {
+        this(fieldName, queryVector, null, null, k, numCands, vectorSimilarity);
     }
 
     private KnnVectorQueryBuilder(
@@ -133,12 +146,21 @@ private KnnVectorQueryBuilder(
         VectorData queryVector,
         QueryVectorBuilder queryVectorBuilder,
         Supplier<float[]> queryVectorSupplier,
+        Integer k,
         Integer numCands,
         Float vectorSimilarity
     ) {
+        if (k != null && k < 1) {
+            throw new IllegalArgumentException("[" + K_FIELD.getPreferredName() + "] must be greater than 0");
+        }
         if (numCands != null && numCands > NUM_CANDS_LIMIT) {
             throw new IllegalArgumentException("[" + NUM_CANDS_FIELD.getPreferredName() + "] cannot exceed [" + NUM_CANDS_LIMIT + "]");
         }
+        if (k != null && numCands != null && numCands < k) {
+            throw new IllegalArgumentException(
+                "[" + NUM_CANDS_FIELD.getPreferredName() + "] cannot be less than [" + K_FIELD.getPreferredName() + "]"
+            );
+        }
         if (queryVector == null && queryVectorBuilder == null) {
             throw new IllegalArgumentException(
                 format(
@@ -158,6 +180,7 @@ private KnnVectorQueryBuilder(
         }
         this.fieldName = fieldName;
         this.queryVector = queryVector;
+        this.k = k;
         this.numCands = numCands;
         this.vectorSimilarity = vectorSimilarity;
         this.queryVectorBuilder = queryVectorBuilder;
@@ -167,6 +190,11 @@ private KnnVectorQueryBuilder(
     public KnnVectorQueryBuilder(StreamInput in) throws IOException {
         super(in);
         this.fieldName = in.readString();
+        if (in.getTransportVersion().onOrAfter(TransportVersions.K_FOR_KNN_QUERY_ADDED)) {
+            this.k = in.readOptionalVInt();
+        } else {
+            this.k = null;
+        }
         if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
             this.numCands = in.readOptionalVInt();
         } else {
@@ -214,6 +242,10 @@ public Float getVectorSimilarity() {
         return vectorSimilarity;
     }
 
+    public Integer k() {
+        return k;
+    }
+
     public Integer numCands() {
         return numCands;
     }
@@ -245,6 +277,9 @@ protected void doWriteTo(StreamOutput out) throws IOException {
             throw new IllegalStateException("missing a rewriteAndFetch?");
         }
         out.writeString(fieldName);
+        if (out.getTransportVersion().onOrAfter(TransportVersions.K_FOR_KNN_QUERY_ADDED)) {
+            out.writeOptionalVInt(k);
+        }
         if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) {
             out.writeOptionalVInt(numCands);
         } else {
@@ -302,6 +337,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
         if (queryVector != null) {
             builder.field(QUERY_VECTOR_FIELD.getPreferredName(), queryVector);
         }
+        if (k != null) {
+            builder.field(K_FIELD.getPreferredName(), k);
+        }
         if (numCands != null) {
             builder.field(NUM_CANDS_FIELD.getPreferredName(), numCands);
         }
@@ -335,7 +373,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException {
             if (queryVectorSupplier.get() == null) {
                 return this;
             }
-            return new KnnVectorQueryBuilder(fieldName, queryVectorSupplier.get(), numCands, vectorSimilarity).boost(boost)
+            return new KnnVectorQueryBuilder(fieldName, queryVectorSupplier.get(), k, numCands, vectorSimilarity).boost(boost)
                 .queryName(queryName)
                 .addFilterQueries(filterQueries);
         }
@@ -357,7 +395,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException {
                 }
                 ll.onResponse(null);
             })));
-            return new KnnVectorQueryBuilder(fieldName, queryVector, queryVectorBuilder, toSet::get, numCands, vectorSimilarity).boost(
+            return new KnnVectorQueryBuilder(fieldName, queryVector, queryVectorBuilder, toSet::get, k, numCands, vectorSimilarity).boost(
                 boost
             ).queryName(queryName).addFilterQueries(filterQueries);
         }
@@ -377,7 +415,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException {
             rewrittenQueries.add(rewrittenQuery);
         }
         if (changed) {
-            return new KnnVectorQueryBuilder(fieldName, queryVector, queryVectorBuilder, queryVectorSupplier, numCands, vectorSimilarity)
+            return new KnnVectorQueryBuilder(fieldName, queryVector, queryVectorBuilder, queryVectorSupplier, k, numCands, vectorSimilarity)
                 .boost(boost)
                 .queryName(queryName)
                 .addFilterQueries(rewrittenQueries);
@@ -388,7 +426,12 @@ protected QueryBuilder doRewrite(QueryRewriteContext ctx) throws IOException {
 
     @Override
     protected Query doToQuery(SearchExecutionContext context) throws IOException {
         MappedFieldType fieldType = context.getFieldType(fieldName);
-        int requestSize = context.requestSize() == null || context.requestSize() < 0 ? DEFAULT_SIZE : context.requestSize();
+        int requestSize;
+        if (k != null) {
+            requestSize = k;
+        } else {
+            requestSize = context.requestSize() == null || context.requestSize() < 0 ? DEFAULT_SIZE : context.requestSize();
+        }
         int adjustedNumCands = numCands == null
             ? Math.round(Math.min(NUM_CANDS_MULTIPLICATIVE_FACTOR * requestSize, NUM_CANDS_LIMIT))
             : numCands;
@@ -446,20 +489,21 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException {
             // Now join the filterQuery & parentFilter to provide the matching blocks of children
             filterQuery = new ToChildBlockJoinQuery(filterQuery, parentBitSet);
         }
-            return vectorFieldType.createKnnQuery(queryVector, adjustedNumCands, filterQuery, vectorSimilarity, parentBitSet);
+            return vectorFieldType.createKnnQuery(queryVector, k, adjustedNumCands, filterQuery, vectorSimilarity, parentBitSet);
         }
-        return vectorFieldType.createKnnQuery(queryVector, adjustedNumCands, filterQuery, vectorSimilarity, null);
+        return vectorFieldType.createKnnQuery(queryVector, k, adjustedNumCands, filterQuery, vectorSimilarity, null);
     }
 
     @Override
     protected int doHashCode() {
-        return Objects.hash(fieldName, Objects.hashCode(queryVector), numCands, filterQueries, vectorSimilarity, queryVectorBuilder);
+        return Objects.hash(fieldName, Objects.hashCode(queryVector), k, numCands, filterQueries, vectorSimilarity, queryVectorBuilder);
     }
 
     @Override
     protected boolean doEquals(KnnVectorQueryBuilder other) {
         return Objects.equals(fieldName, other.fieldName)
             && Objects.equals(queryVector, other.queryVector)
+            && Objects.equals(k, other.k)
             && Objects.equals(numCands, other.numCands)
             && Objects.equals(filterQueries, other.filterQueries)
             && Objects.equals(vectorSimilarity, other.vectorSimilarity)
diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
index 5192ea2b4b108..a9d9c6a5a1938 100644
--- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
+++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
@@ -14,6 +14,7 @@ org.elasticsearch.rest.RestFeatures
 org.elasticsearch.indices.IndicesFeatures
 org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures
 org.elasticsearch.index.mapper.MapperFeatures
+org.elasticsearch.search.SearchFeatures
 org.elasticsearch.search.retriever.RetrieversFeatures
 org.elasticsearch.script.ScriptFeatures
 org.elasticsearch.reservedstate.service.FileSettingsFeatures
diff --git a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java
index 818f74da5853a..c182d93ffafc9 100644
--- a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java
@@ -416,7 +416,7 @@ public void testKnnSearchAction() throws IOException {
         // how the action works (it builds a kNN query under the hood)
         float[] queryVector = randomVector();
         assertResponse(
-            client().prepareSearch("index1", "index2").setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5, null)).setSize(2),
+            client().prepareSearch("index1", "index2").setQuery(new KnnVectorQueryBuilder("vector", queryVector, null, 5, null)).setSize(2),
             response -> {
                 // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard
                 assertHitCount(response, 5 * 2);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
index 5397e4cd335ff..5aa02994fa6a8 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
@@ -1126,7 +1126,7 @@ public void testByteVectorQueryBoundaries() throws IOException {
 
         Exception e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { 128, 0, 0 }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { 128, 0, 0 }, 3, 3, null, null, null)
         );
         assertThat(
             e.getMessage(),
@@ -1135,7 +1135,7 @@ public void testByteVectorQueryBoundaries() throws IOException {
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { 0.0f, 0f, -129.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { 0.0f, 0f, -129.0f }, 3, 3, null, null, null)
        );
         assertThat(
             e.getMessage(),
@@ -1144,7 +1144,7 @@ public void testByteVectorQueryBoundaries() throws IOException {
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { 0.0f, 0.5f, 0.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { 0.0f, 0.5f, 0.0f }, 3, 3, null, null, null)
         );
         assertThat(
             e.getMessage(),
@@ -1153,7 +1153,7 @@ public void testByteVectorQueryBoundaries() throws IOException {
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { 0, 0.0f, -0.25f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { 0, 0.0f, -0.25f }, 3, 3, null, null, null)
         );
         assertThat(
             e.getMessage(),
@@ -1162,13 +1162,13 @@ public void testByteVectorQueryBoundaries() throws IOException {
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.NaN, 0f, 0.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.NaN, 0f, 0.0f }, 3, 3, null, null, null)
         );
         assertThat(e.getMessage(), containsString("element_type [byte] vectors do not support NaN values but found [NaN] at dim [0];"));
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.POSITIVE_INFINITY, 0f, 0.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.POSITIVE_INFINITY, 0f, 0.0f }, 3, 3, null, null, null)
         );
         assertThat(
             e.getMessage(),
@@ -1177,7 +1177,7 @@ public void testByteVectorQueryBoundaries() throws IOException {
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { 0, Float.NEGATIVE_INFINITY, 0.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { 0, Float.NEGATIVE_INFINITY, 0.0f }, 3, 3, null, null, null)
         );
         assertThat(
             e.getMessage(),
@@ -1203,13 +1203,13 @@ public void testFloatVectorQueryBoundaries() throws IOException {
 
         Exception e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.NaN, 0f, 0.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.NaN, 0f, 0.0f }, 3, 3, null, null, null)
         );
         assertThat(e.getMessage(), containsString("element_type [float] vectors do not support NaN values but found [NaN] at dim [0];"));
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.POSITIVE_INFINITY, 0f, 0.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { Float.POSITIVE_INFINITY, 0f, 0.0f }, 3, 3, null, null, null)
         );
         assertThat(
             e.getMessage(),
@@ -1218,7 +1218,7 @@ public void testFloatVectorQueryBoundaries() throws IOException {
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> denseVectorFieldType.createKnnQuery(new float[] { 0, Float.NEGATIVE_INFINITY, 0.0f }, 3, null, null, null)
+            () -> denseVectorFieldType.createKnnQuery(new float[] { 0, Float.NEGATIVE_INFINITY, 0.0f }, 3, 3, null, null, null)
         );
         assertThat(
             e.getMessage(),
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java
index 96917a42cff65..2a4554091dc91 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java
@@ -165,7 +165,7 @@ public void testCreateNestedKnnQuery() {
             for (int i = 0; i < dims; i++) {
                 queryVector[i] = randomFloat();
             }
-            Query query = field.createKnnQuery(queryVector, 10, null, null, producer);
+            Query query = field.createKnnQuery(queryVector, 10, 10, null, null, producer);
             assertThat(query, instanceOf(DiversifyingChildrenFloatKnnVectorQuery.class));
         }
         {
@@ -186,11 +186,11 @@ public void testCreateNestedKnnQuery() {
                 floatQueryVector[i] = queryVector[i];
             }
             VectorData vectorData = new VectorData(null, queryVector);
-            Query query = field.createKnnQuery(vectorData, 10, null, null, producer);
+            Query query = field.createKnnQuery(vectorData, 10, 10, null, null, producer);
             assertThat(query, instanceOf(DiversifyingChildrenByteKnnVectorQuery.class));
 
             vectorData = new VectorData(floatQueryVector, null);
-            query = field.createKnnQuery(vectorData, 10, null, null, producer);
+            query = field.createKnnQuery(vectorData, 10, 10, null, null, producer);
             assertThat(query, instanceOf(DiversifyingChildrenByteKnnVectorQuery.class));
         }
     }
@@ -251,7 +251,7 @@ public void testFloatCreateKnnQuery() {
         );
         IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
-            () -> unindexedField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f, 0.0f }, 10, null, null, null)
+            () -> unindexedField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f, 0.0f }, 10, 10, null, null, null)
         );
         assertThat(e.getMessage(), containsString("to perform knn search on field [f], its mapping must have [index] set to [true]"));
 
@@ -267,7 +267,7 @@ public void testFloatCreateKnnQuery() {
         );
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> dotProductField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f, 0.0f }, 10, null, null, null)
+            () -> dotProductField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f, 0.0f }, 10, 10, null, null, null)
         );
         assertThat(e.getMessage(), containsString("The [dot_product] similarity can only be used with unit-length vectors."));
@@ -283,7 +283,7 @@ public void testFloatCreateKnnQuery() {
         );
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> cosineField.createKnnQuery(new float[] { 0.0f, 0.0f, 0.0f, 0.0f }, 10, null, null, null)
+            () -> cosineField.createKnnQuery(new float[] { 0.0f, 0.0f, 0.0f, 0.0f }, 10, 10, null, null, null)
         );
         assertThat(e.getMessage(), containsString("The [cosine] similarity does not support vectors with zero magnitude."));
     }
@@ -304,7 +304,7 @@ public void testCreateKnnQueryMaxDims() {
             for (int i = 0; i < 4096; i++) {
                 queryVector[i] = randomFloat();
             }
-            Query query = fieldWith4096dims.createKnnQuery(queryVector, 10, null, null, null);
+            Query query = fieldWith4096dims.createKnnQuery(queryVector, 10, 10, null, null, null);
             assertThat(query, instanceOf(KnnFloatVectorQuery.class));
         }
 
@@ -324,7 +324,7 @@ public void testCreateKnnQueryMaxDims() {
                 queryVector[i] = randomByte();
             }
             VectorData vectorData = new VectorData(null, queryVector);
-            Query query = fieldWith4096dims.createKnnQuery(vectorData, 10, null, null, null);
+            Query query = fieldWith4096dims.createKnnQuery(vectorData, 10, 10, null, null, null);
             assertThat(query, instanceOf(KnnByteVectorQuery.class));
         }
     }
@@ -342,7 +342,7 @@ public void testByteCreateKnnQuery() {
         );
         IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
-            () -> unindexedField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f }, 10, null, null, null)
+            () -> unindexedField.createKnnQuery(new float[] { 0.3f, 0.1f, 1.0f }, 10, 10, null, null, null)
         );
         assertThat(e.getMessage(), containsString("to perform knn search on field [f], its mapping must have [index] set to [true]"));
 
@@ -358,13 +358,13 @@ public void testByteCreateKnnQuery() {
         );
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> cosineField.createKnnQuery(new float[] { 0.0f, 0.0f, 0.0f }, 10, null, null, null)
+            () -> cosineField.createKnnQuery(new float[] { 0.0f, 0.0f, 0.0f }, 10, 10, null, null, null)
        );
         assertThat(e.getMessage(), containsString("The [cosine] similarity does not support vectors with zero magnitude."));
 
         e = expectThrows(
             IllegalArgumentException.class,
-            () -> cosineField.createKnnQuery(new VectorData(null, new byte[] { 0, 0, 0 }), 10, null, null, null)
+            () -> cosineField.createKnnQuery(new VectorData(null, new byte[] { 0, 0, 0 }), 10, 10, null, null, null)
         );
         assertThat(e.getMessage(), containsString("The [cosine] similarity does not support vectors with zero magnitude."));
     }
diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
index 137e0cb348a9c..b0a891dfbb1f7 100644
--- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java
@@ -267,6 +267,7 @@ public void testKnnRewriteForInnerHits() throws IOException {
         KnnVectorQueryBuilder innerQueryBuilder = new KnnVectorQueryBuilder(
             "nested1." + VECTOR_FIELD,
             new float[] { 1.0f, 2.0f, 3.0f },
+            null,
             1,
             null
         );
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java
index fdd9b94cb5050..f5d9f35e34695 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java
@@ -52,7 +52,7 @@ abstract class AbstractKnnVectorQueryBuilderTestCase extends AbstractQueryTestCa
 
     abstract DenseVectorFieldMapper.ElementType elementType();
 
-    abstract KnnVectorQueryBuilder createKnnVectorQueryBuilder(String fieldName, int numCands, Float similarity);
+    abstract KnnVectorQueryBuilder createKnnVectorQueryBuilder(String fieldName, Integer k, int numCands, Float similarity);
 
     @Override
     protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
@@ -82,8 +82,9 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
     @Override
     protected KnnVectorQueryBuilder doCreateTestQueryBuilder() {
         String fieldName = randomBoolean() ? VECTOR_FIELD : VECTOR_ALIAS_FIELD;
-        int numCands = randomIntBetween(DEFAULT_SIZE, 1000);
-        KnnVectorQueryBuilder queryBuilder = createKnnVectorQueryBuilder(fieldName, numCands, randomBoolean() ? null : randomFloat());
+        Integer k = randomBoolean() ? null : randomIntBetween(1, 100);
+        int numCands = randomIntBetween(k == null ? DEFAULT_SIZE : k + 20, 1000);
+        KnnVectorQueryBuilder queryBuilder = createKnnVectorQueryBuilder(fieldName, k, numCands, randomFloat());
         if (randomBoolean()) {
             List<QueryBuilder> filters = new ArrayList<>();
@@ -125,12 +126,14 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que
             case BYTE, BIT -> new ESKnnByteVectorQuery(
                 VECTOR_FIELD,
                 queryBuilder.queryVector().asByteVector(),
+                queryBuilder.k(),
                 queryBuilder.numCands(),
                 filterQuery
             );
             case FLOAT -> new ESKnnFloatVectorQuery(
                 VECTOR_FIELD,
                 queryBuilder.queryVector().asFloatVector(),
+                queryBuilder.k(),
                 queryBuilder.numCands(),
                 filterQuery
             );
@@ -143,7 +146,7 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que
 
     public void testWrongDimension() {
         SearchExecutionContext context = createSearchExecutionContext();
-        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] { 1.0f, 2.0f }, 10, null);
+        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] { 1.0f, 2.0f }, 5, 10, null);
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> query.doToQuery(context));
         assertThat(
             e.getMessage(),
@@ -153,7 +156,7 @@ public void testWrongDimension() {
 
     public void testNonexistentField() {
         SearchExecutionContext context = createSearchExecutionContext();
-        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("nonexistent", new float[] { 1.0f, 1.0f, 1.0f }, 10, null);
+        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("nonexistent", new float[] { 1.0f, 1.0f, 1.0f }, 5, 10, null);
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> query.doToQuery(context));
         assertThat(e.getMessage(), containsString("field [nonexistent] does not exist in the mapping"));
     }
@@ -163,6 +166,7 @@ public void testWrongFieldType() {
         KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(
             AbstractBuilderTestCase.KEYWORD_FIELD_NAME,
             new float[] { 1.0f, 1.0f, 1.0f },
+            5,
             10,
             null
         );
@@ -170,9 +174,19 @@ public void testWrongFieldType() {
         assertThat(e.getMessage(), containsString("[knn] queries are only supported on [dense_vector] fields"));
     }
 
+    public void testNumCandsLessThanK() {
+        int k = 5;
+        int numCands = 3;
+        IllegalArgumentException e = expectThrows(
+            IllegalArgumentException.class,
+            () -> new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] { 1.0f, 1.0f, 1.0f }, k, numCands, null)
+        );
+        assertThat(e.getMessage(), containsString("[num_candidates] cannot be less than [k]"));
+    }
+
     @Override
     public void testValidOutput() {
-        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] { 1.0f, 2.0f, 3.0f }, 10, null);
+        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] { 1.0f, 2.0f, 3.0f }, null, 10, null);
         String expected = """
             {
               "knn" : {
@@ -186,6 +200,22 @@ public void testValidOutput() {
               }
             }""";
         assertEquals(expected, query.toString());
+
+        KnnVectorQueryBuilder query2 = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] { 1.0f, 2.0f, 3.0f }, 5, 10, null);
+        String expected2 = """
+            {
+              "knn" : {
+                "field" : "vector",
+                "query_vector" : [
+                  1.0,
+                  2.0,
+                  3.0
+                ],
+                "k" : 5,
+                "num_candidates" : 10
+              }
+            }""";
+        assertEquals(expected2, query2.toString());
     }
 
     @Override
@@ -193,7 +223,13 @@ public void testMustRewrite() throws IOException {
         SearchExecutionContext context = createSearchExecutionContext();
         context.setAllowUnmappedFields(true);
         TermQueryBuilder termQuery = new TermQueryBuilder("unmapped_field", 42);
-        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(VECTOR_FIELD, new float[] { 1.0f, 2.0f, 3.0f }, VECTOR_DIMENSION, null);
+        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder(
+            VECTOR_FIELD,
+            new float[] { 1.0f, 2.0f, 3.0f },
+            VECTOR_DIMENSION,
+            null,
+            null
+        );
         query.addFilterQuery(termQuery);
 
         IllegalStateException e = expectThrows(IllegalStateException.class, () -> query.toQuery(context));
@@ -206,7 +242,7 @@ public void testBWCVersionSerializationFilters() throws IOException {
         KnnVectorQueryBuilder query = createTestQueryBuilder();
         VectorData vectorData = VectorData.fromFloats(query.queryVector().asFloatVector());
-        KnnVectorQueryBuilder queryNoFilters = new KnnVectorQueryBuilder(query.getFieldName(), vectorData, query.numCands(), null)
+        KnnVectorQueryBuilder queryNoFilters = new KnnVectorQueryBuilder(query.getFieldName(), vectorData, null, query.numCands(), null)
             .queryName(query.queryName())
             .boost(query.boost());
         TransportVersion beforeFilterVersion = TransportVersionUtils.randomVersionBetween(
@@ -220,7 +256,7 @@ public void testBWCVersionSerializationSimilarity() throws IOException {
         KnnVectorQueryBuilder query = createTestQueryBuilder();
         VectorData vectorData = VectorData.fromFloats(query.queryVector().asFloatVector());
-        KnnVectorQueryBuilder queryNoSimilarity = new KnnVectorQueryBuilder(query.getFieldName(), vectorData, query.numCands(), null)
+        KnnVectorQueryBuilder queryNoSimilarity = new KnnVectorQueryBuilder(query.getFieldName(), vectorData, null, query.numCands(), null)
             .queryName(query.queryName())
             .boost(query.boost())
             .addFilterQueries(query.filterQueries());
@@ -236,10 +272,13 @@ public void testBWCVersionSerializationQuery() throws IOException {
         );
         Float similarity = differentQueryVersion.before(TransportVersions.V_8_8_0) ? null : query.getVectorSimilarity();
         VectorData vectorData = VectorData.fromFloats(query.queryVector().asFloatVector());
-        KnnVectorQueryBuilder queryOlderVersion = new KnnVectorQueryBuilder(query.getFieldName(), vectorData, query.numCands(), similarity)
-            .queryName(query.queryName())
-            .boost(query.boost())
-            .addFilterQueries(query.filterQueries());
+        KnnVectorQueryBuilder queryOlderVersion = new KnnVectorQueryBuilder(
+            query.getFieldName(),
+            vectorData,
+            null,
+            query.numCands(),
+            similarity
+        ).queryName(query.queryName()).boost(query.boost()).addFilterQueries(query.filterQueries());
         assertBWCSerialization(query, queryOlderVersion, differentQueryVersion);
     }
 
@@ -266,6 +305,7 @@ public void testRewriteWithQueryVectorBuilder() throws Exception {
         KnnVectorQueryBuilder knnVectorQueryBuilder = new KnnVectorQueryBuilder(
             "field",
             new TestQueryVectorBuilderPlugin.TestQueryVectorBuilder(expectedArray),
+            null,
             5,
             1f
         );
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnByteVectorQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnByteVectorQueryBuilderTests.java
index 6c83700d0b29a..8399fed58494c 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/KnnByteVectorQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnByteVectorQueryBuilderTests.java
@@ -17,11 +17,11 @@ DenseVectorFieldMapper.ElementType elementType() {
     }
 
     @Override
-    protected KnnVectorQueryBuilder createKnnVectorQueryBuilder(String fieldName, int numCands, Float similarity) {
+    protected KnnVectorQueryBuilder createKnnVectorQueryBuilder(String fieldName, Integer k, int numCands, Float similarity) {
         byte[] vector = new byte[VECTOR_DIMENSION];
         for (int i = 0; i < vector.length; i++) {
             vector[i] = randomByte();
         }
-        return new KnnVectorQueryBuilder(fieldName, vector, numCands, similarity);
+        return new KnnVectorQueryBuilder(fieldName, vector, k, numCands, similarity);
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnFloatVectorQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnFloatVectorQueryBuilderTests.java
index eeb5244d57943..744256bff4ee3 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/KnnFloatVectorQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnFloatVectorQueryBuilderTests.java
@@ -17,11 +17,11 @@ DenseVectorFieldMapper.ElementType elementType() {
     }
 
     @Override
-    KnnVectorQueryBuilder createKnnVectorQueryBuilder(String fieldName, int numCands, Float similarity) {
+    KnnVectorQueryBuilder createKnnVectorQueryBuilder(String fieldName, Integer k, int numCands, Float similarity) {
         float[] vector = new float[VECTOR_DIMENSION];
         for (int i = 0; i < vector.length; i++) {
             vector[i] = randomFloat();
         }
-        return new KnnVectorQueryBuilder(fieldName, vector, numCands, similarity);
+        return new KnnVectorQueryBuilder(fieldName, vector, k, numCands, similarity);
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchBuilderTests.java
index 564c8b9d0db11..616b87972faaa 100644
--- a/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchBuilderTests.java
@@ -166,7 +166,8 @@ public void testToQueryBuilder() {
             builder.addFilterQuery(filter);
         }
 
-        QueryBuilder expected = new KnnVectorQueryBuilder(field, vector, numCands, similarity).addFilterQueries(filterQueries).boost(boost);
+        QueryBuilder expected = new KnnVectorQueryBuilder(field, vector, null, numCands, similarity).addFilterQueries(filterQueries)
+            .boost(boost);
         assertEquals(expected, builder.toQueryBuilder());
     }
 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java
index 775ac441e63b8..6cdbae9754f7d 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java
@@ -427,7 +427,7 @@ public QueryBuilder semanticQuery(InferenceResults inferenceResults, float boost
                     );
                 }
 
-                yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, null, null);
+                yield new KnnVectorQueryBuilder(inferenceResultsFieldName, inference, null, null, null);
             }
             default -> throw new IllegalStateException(
                 "Field ["
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
index 1be8f543ebcb3..704d8b75d9ed3 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
@@ -884,7 +884,7 @@ public void testKnnSearch() throws Exception {
         // Since there's no kNN search action at the transport layer, we just emulate
         // how the action works (it builds a kNN query under the hood)
         float[] queryVector = new float[] { 0.0f, 0.0f, 0.0f };
-        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("vector", queryVector, 50, null);
+        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("vector", queryVector, 50, 50, null);
         if (randomBoolean()) {
             query.addFilterQuery(new WildcardQueryBuilder("other", "value*"));
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java
index 849f5d1a48c5e..bffa53b1f4da6 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java
@@ -441,7 +441,7 @@ public void testKnnSearch() throws IOException {
         // Since there's no kNN search action at the transport layer, we just emulate
         // how the action works (it builds a kNN query under the hood)
         float[] queryVector = new float[] { 0.0f, 0.0f, 0.0f };
-        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("vector", queryVector, 10, null);
+        KnnVectorQueryBuilder query = new KnnVectorQueryBuilder("vector", queryVector, 10, 10, null);
 
         // user1 has access to vector field, so the query should match with the document:
         assertResponse(
@@ -475,7 +475,7 @@ public void testKnnSearch() throws IOException {
             }
         );
         // user1 can access field1, so the filtered query should match with the document:
-        KnnVectorQueryBuilder filterQuery1 = new KnnVectorQueryBuilder("vector", queryVector, 10, null).addFilterQuery(
+        KnnVectorQueryBuilder filterQuery1 = new KnnVectorQueryBuilder("vector", queryVector, 10, 10, null).addFilterQuery(
            QueryBuilders.matchQuery("field1", "value1")
         );
         assertHitCount(
@@ -486,7 +486,7 @@ public void testKnnSearch() throws IOException {
         );
 
         // user1 cannot access field2, so the filtered query should not match with the document:
-        KnnVectorQueryBuilder filterQuery2 = new KnnVectorQueryBuilder("vector", queryVector, 10, null).addFilterQuery(
+        KnnVectorQueryBuilder filterQuery2 = new KnnVectorQueryBuilder("vector", queryVector, 10, 10, null).addFilterQuery(
             QueryBuilders.matchQuery("field2", "value2")
         );
         assertHitCount(

From dea593db3f4d7d40ed23c0574a6a3f3fb8b3470e Mon Sep 17 00:00:00 2001
From: George Wallace
Date: Fri, 28 Jun 2024 09:01:48 -0600
Subject: [PATCH 044/216] Update behavioral-analytics-start.asciidoc (#110271)

---
 .../behavioral-analytics/behavioral-analytics-start.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc
index fe1515302082f..f29a6f3a37fdf 100644
--- a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc
+++ b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc
@@ -182,7 +182,7 @@ createTracker({
 [[behavioral-analytics-start-ui-integration-search-ui]]
 ==== Search UI integration
 
-https://docs.elastic.co/search-ui/getting-started[Search UI^] is a JavaScript library for building search experiences.
+https://docs.elastic.co/search-ui[Search UI^] is a JavaScript library for building search experiences.
 Use the https://www.npmjs.com/package/@elastic/search-ui-analytics-plugin[Search UI analytics plugin^] available on NPM to integrate behavioral analytics with Search UI.
 This integration enables you to dispatch events from Search UI to the behavioral analytics client.

From 3cb77c9e5c7ca4a099ee6c719b421289305e4f40 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Fri, 28 Jun 2024 08:41:28 -0700
Subject: [PATCH 045/216] Tolerate floating point precision in multi node
 tests (#110238)

Some assertions with floating-point values are failing on serverless
because we run tests with three shards with serverless. This can cause
variations in precision because data may arrive in different orders.
For example, sum([a, b, c]) can yield a different precision than
sum([a, c, b]). This change introduces tolerance for precision
differences beyond e-10, which should be acceptable for ESQL.
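A minimal standalone sketch of the comparison scheme (not part of this
commit; the class and variable names below are hypothetical): both the
expected and the actual double are truncated to 10 significant digits
before comparing, mirroring the BigDecimal-based valueMapper change in
the diff that follows.

    import java.math.BigDecimal;
    import java.math.MathContext;
    import java.math.RoundingMode;

    public class PrecisionToleranceSketch {
        // Truncate a double to 10 significant digits, as the test harness change below does.
        static double roundTo10SignificantDigits(double d) {
            return new BigDecimal(d).round(new MathContext(10, RoundingMode.DOWN)).doubleValue();
        }

        public static void main(String[] args) {
            // Two results that agree only up to the 10th significant digit,
            // e.g. the same sum accumulated in two different shard orders.
            double a = 0.6123456789012;
            double b = 0.6123456789045;
            System.out.println(a == b);                                                   // false
            System.out.println(roundTo10SignificantDigits(a) == roundTo10SignificantDigits(b)); // true
        }
    }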
---
 .../esql/qa/mixed/MixedClusterEsqlSpecIT.java |  5 ++++
 .../xpack/esql/ccq/MultiClusterSpecIT.java    |  5 ++++
 .../xpack/esql/qa/multi_node/EsqlSpecIT.java  |  5 ++++
 .../xpack/esql/qa/rest/EsqlSpecTestCase.java  | 28 +++++++++++++++++--
 4 files changed, 41 insertions(+), 2 deletions(-)

diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java
index cbfa043b9dc5d..8ab375dfd24c7 100644
--- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java
@@ -73,4 +73,9 @@ protected void shouldSkipTest(String testName) throws IOException {
     protected boolean supportsAsync() {
         return oldClusterHasFeature(ASYNC_QUERY_FEATURE_ID);
     }
+
+    @Override
+    protected boolean enableRoundingDoubleValuesOnAsserting() {
+        return true;
+    }
 }
diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
index 807d6cff1966c..e6c7a3c73f1fb 100644
--- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java
@@ -242,4 +242,9 @@ static boolean hasIndexMetadata(String query) {
         }
         return false;
     }
+
+    @Override
+    protected boolean enableRoundingDoubleValuesOnAsserting() {
+        return true;
+    }
 }
diff --git a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java
index aeb8fa96d0db3..15407804a56f2 100644
--- a/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java
+++ b/x-pack/plugin/esql/qa/server/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/multi_node/EsqlSpecIT.java
@@ -24,4 +24,9 @@ protected String getTestRestCluster() {
     public EsqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase, Mode mode) {
         super(fileName, groupName, testName, lineNumber, testCase, mode);
     }
+
+    @Override
+    protected boolean enableRoundingDoubleValuesOnAsserting() {
+        return true;
+    }
 }
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
index b231de66f29a6..e25eb84023867 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
@@ -37,6 +37,9 @@
 import org.junit.Before;
 
 import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.MathContext;
+import java.math.RoundingMode;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -244,10 +247,10 @@ protected void assertResults(
         Logger logger
     ) {
         assertMetadata(expected, actualColumns, logger);
-        assertData(expected, actualValues, testCase.ignoreOrder, logger, EsqlSpecTestCase::valueMapper);
+        assertData(expected, actualValues, testCase.ignoreOrder, logger, this::valueMapper);
     }
 
-    private static Object valueMapper(CsvTestUtils.Type type, Object value) {
+    private Object valueMapper(CsvTestUtils.Type type, Object value) {
         if (value == null) {
             return "null";
         }
@@ -262,9 +265,30 @@ private Object valueMapper(CsvTestUtils.Type type, Object value) {
                 } catch (Throwable ignored) {}
             }
         }
+        if (type == CsvTestUtils.Type.DOUBLE && enableRoundingDoubleValuesOnAsserting()) {
+            if (value instanceof List<?> vs) {
+                List<Object> values = new ArrayList<>();
+                for (Object v : vs) {
+                    values.add(valueMapper(type, v));
+                }
+                return values;
+            } else if (value instanceof Double d) {
+                return new BigDecimal(d).round(new MathContext(10, RoundingMode.DOWN)).doubleValue();
+            } else if (value instanceof String s) {
+                return new BigDecimal(s).round(new MathContext(10, RoundingMode.DOWN)).doubleValue();
+            }
+        }
         return value.toString();
     }
 
+    /**
+     * Rounds double values when asserting double values returned in queries.
+     * By default, no rounding is performed.
+     */
+    protected boolean enableRoundingDoubleValuesOnAsserting() {
+        return false;
+    }
+
     private static String normalizedPoint(CsvTestUtils.Type type, double x, double y) {
         if (type == CsvTestUtils.Type.GEO_POINT) {
             return normalizedGeoPoint(x, y);

From 989580f64558371422c426dbefdb31ce150edfb1 Mon Sep 17 00:00:00 2001
From: Keith Massey
Date: Fri, 28 Jun 2024 11:02:35 -0500
Subject: [PATCH 046/216] Updating
 TransportSimulateBulkActionTests.testIndexData() to use IndexRequests
 (#110275)

---
 .../bulk/TransportSimulateBulkActionTests.java | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
index 7313cb3277100..9e80f73d4df4a 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
@@ -17,6 +17,7 @@
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.index.IndexVersions;
@@ -30,6 +31,8 @@
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.json.JsonXContent;
 import org.junit.After;
 import org.junit.Before;
 
@@ -113,10 +116,11 @@ public void testIndexData() {
         for (int i = 0; i < bulkItemCount; i++) {
             Map<String, String> source = Map.of(randomAlphaOfLength(10), randomAlphaOfLength(5));
             IndexRequest indexRequest = new IndexRequest(randomAlphaOfLength(10)).id(randomAlphaOfLength(10)).source(source);
+            indexRequest.setListExecutedPipelines(true);
             for (int j = 0; j < randomIntBetween(0, 10); j++) {
                 indexRequest.addPipeline(randomAlphaOfLength(12));
             }
-            bulkRequest.add();
+            bulkRequest.add(indexRequest);
         }
         AtomicBoolean onResponseCalled = new AtomicBoolean(false);
         ActionListener<BulkResponse> listener = new ActionListener<>() {
@@ -124,6 +128,7 @@ public void testIndexData() {
             public void onResponse(BulkResponse response) {
                 onResponseCalled.set(true);
                 BulkItemResponse[] responseItems = response.getItems();
+                assertThat(responseItems.length, equalTo(bulkItemCount));
                 assertThat(responseItems.length, equalTo(bulkRequest.requests().size()));
                 for (int i = 0; i < responseItems.length; i++) {
                     BulkItemResponse responseItem = responseItems[i];
@@ -143,12 +148,15 @@ public void onResponse(BulkResponse response) {
                         Strings.format(
                             """
                                 {
+                                  "_id": "%s",
                                   "_index": "%s",
+                                  "_version": -3,
                                   "_source": %s,
                                   "executed_pipelines": [%s]
                                 }""",
+                            indexRequest.id(),
                             indexRequest.index(),
-                            indexRequest.source(),
+                            convertMapToJsonString(indexRequest.sourceAsMap()),
                             indexRequest.getExecutedPipelines()
                                 .stream()
                                 .map(pipeline -> "\"" + pipeline + "\"")
@@ -171,4 +179,10 @@ public void onFailure(Exception e) {
         bulkAction.doInternalExecute(task, bulkRequest, r -> fail("executor is unused"), listener, randomLongBetween(0, Long.MAX_VALUE));
         assertThat(onResponseCalled.get(), equalTo(true));
     }
+
+    private String convertMapToJsonString(Map<String, Object> map) throws IOException {
+        try (XContentBuilder builder = JsonXContent.contentBuilder().map(map)) {
+            return BytesReference.bytes(builder).utf8ToString();
+        }
+    }
 }

From 047212ba0ab233a02104d020c41f0eb44922c947 Mon Sep 17 00:00:00 2001
From: Mike Pellegrini
Date: Fri, 28 Jun 2024 12:50:31 -0400
Subject: [PATCH 047/216] Add Element Type To Semantic Text Model Settings

Adds the element type to semantic_text's model settings, which allows it
to index byte embeddings

---
 .../vectors/DenseVectorFieldMapper.java       |   8 ++
 .../vectors/DenseVectorFieldMapperTests.java  |  15 +++
 .../TestDenseInferenceServiceExtension.java   | 118 +++++++++++++-----
 .../ShardBulkInferenceActionFilterIT.java     |   4 +-
 .../inference/mapper/SemanticTextField.java   |  38 +++++-
 .../mapper/SemanticTextFieldMapper.java       |   3 +
 .../elasticsearch/xpack/inference/Utils.java  |  10 +-
 .../mapper/SemanticTextFieldMapperTests.java  |  80 +++++++++++-
 .../mapper/SemanticTextFieldTests.java        |  38 ++++--
 .../xpack/inference/model/TestModel.java      |  29 ++++-
 .../queries/SemanticQueryBuilderTests.java    |  27 ++--
 .../10_semantic_text_field_mapping.yml        |   1 +
 ...eld_mapping_incompatible_field_mapping.yml |  29 +++++
 .../test/inference/40_semantic_text_query.yml |  61 +++++++++
 14 files changed, 401 insertions(+), 60 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
index 7d2fc9230a72f..44f49fb6c7966 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java
@@ -272,8 +272,16 @@ public Builder dimensions(int dimensions) {
             return this;
         }
 
+        public Builder elementType(ElementType elementType) {
+            this.elementType.setValue(elementType);
+            return this;
+        }
+
         @Override
         public DenseVectorFieldMapper build(MapperBuilderContext context) {
+            // Validate again here because the dimensions or element type could have been set programmatically,
+            // which affects index option validity
+            validate();
             return new DenseVectorFieldMapper(
                 leafName(),
                 new DenseVectorFieldType(
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
index 5aa02994fa6a8..3dd4e31b9ca3f 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java
@@ -34,6 +34,7 @@
 import org.elasticsearch.index.mapper.DocumentParsingException;
 import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperBuilderContext;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MapperTestCase;
@@ -791,6 +792,20 @@ public void testAddDocumentsToIndexBefore_V_7_5_0() throws Exception {
         assertArrayEquals("Decoded dense vector values is not equal to the indexed one.", validVector, decodedValues, 0.001f);
     }
 
+    public void testValidateOnBuild() {
+        final MapperBuilderContext context = MapperBuilderContext.root(false, false);
+
+        // Build a dense vector field mapper with float element type, which will trigger int8 HNSW index options
+        DenseVectorFieldMapper mapper = new DenseVectorFieldMapper.Builder("test", IndexVersion.current()).elementType(ElementType.FLOAT)
+            .build(context);
+
+        // Change the element type to byte, which is incompatible with int8 HNSW index options
+        DenseVectorFieldMapper.Builder builder = (DenseVectorFieldMapper.Builder) mapper.getMergeBuilder();
+        builder.elementType(ElementType.BYTE);
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.build(context));
+        assertThat(e.getMessage(), containsString("[element_type] cannot be [byte] when using index type [int8_hnsw]"));
+    }
+
     private static float[] decodeDenseVector(IndexVersion indexVersion, BytesRef encodedVector) {
         int dimCount = VectorEncoderDecoder.denseVectorLength(indexVersion, encodedVector);
         float[] vector = new float[dimCount];
diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java
index cddcff9692a70..d455b564b32d5 100644
--- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java
+++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
 import org.elasticsearch.inference.ChunkedInferenceServiceResults;
 import org.elasticsearch.inference.ChunkingOptions;
 import org.elasticsearch.inference.InferenceServiceExtension;
@@ -33,6 +34,7 @@
 import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -138,41 +140,78 @@ public void chunkedInfer(
 
     private InferenceTextEmbeddingFloatResults makeResults(List<String> input, int dimensions) {
         List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddings = new ArrayList<>();
-        for (int i = 0; i < input.size(); i++) {
-            float[] doubleEmbeddings = generateEmbedding(input.get(i), dimensions);
-            List<Float> floatEmbeddings = new ArrayList<>(dimensions);
-            for (int j = 0; j < dimensions; j++) {
-                floatEmbeddings.add(doubleEmbeddings[j]);
-            }
+        for (String inputString : input) {
+            List<Float> floatEmbeddings = generateEmbedding(inputString, dimensions);
             embeddings.add(InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(floatEmbeddings));
         }
         return new InferenceTextEmbeddingFloatResults(embeddings);
     }
 
     private List<ChunkedInferenceServiceResults> makeChunkedResults(List<String> input, int dimensions) {
-        var chunks = new ArrayList<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding>();
-        for (int i = 0; i < input.size(); i++) {
-            float[] embedding = generateEmbedding(input.get(i), dimensions);
-            chunks.add(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(embedding));
-        }
-
-        return InferenceChunkedTextEmbeddingFloatResults.listOf(input, new InferenceTextEmbeddingFloatResults(chunks));
+        InferenceTextEmbeddingFloatResults nonChunkedResults = makeResults(input, dimensions);
+        return InferenceChunkedTextEmbeddingFloatResults.listOf(input, nonChunkedResults);
     }
 
     protected ServiceSettings getServiceSettingsFromMap(Map<String, Object> serviceSettingsMap) {
         return TestServiceSettings.fromMap(serviceSettingsMap);
     }
 
-    private static float[] generateEmbedding(String input, int dimensions) {
-        float[] embedding = new float[dimensions];
-        for (int j = 0; j < dimensions; j++) {
-            embedding[j] = input.hashCode() + 1 + j;
+    /**
+     * Generate a test embedding for the provided input.
+     * <p>
+     * The goal of this method is to generate an embedding with the following properties:
+     * </p>
+     * <ul>
+     *     <li>Unique to the input</li>
+     *     <li>Reproducible (i.e given the same input, the same embedding should be generated)</li>
+     *     <li>Valid as both a float and byte embedding</li>
+     * </ul>
+     * <p>
+     * The embedding is generated by:
+     * </p>
+     * <ul>
+     *     <li>getting the hash code of the input</li>
+     *     <li>converting the hash code value to a string</li>
+     *     <li>converting the string to a UTF-8 encoded byte array</li>
+     *     <li>repeatedly appending the byte array to the embedding until the desired number of dimensions are populated</li>
+     * </ul>
+     * <p>
+     * Since the hash code value, when interpreted as a string, is guaranteed to only contain digits and the "-" character, the UTF-8
+     * encoded byte array is guaranteed to only contain values in the standard ASCII table.
+     * </p>
+     *
+     * @param input The input string
+     * @param dimensions The embedding dimension count
+     * @return An embedding
+     */
+    private static List<Float> generateEmbedding(String input, int dimensions) {
+        List<Float> embedding = new ArrayList<>(dimensions);
+
+        byte[] byteArray = Integer.toString(input.hashCode()).getBytes(StandardCharsets.UTF_8);
+        List<Float> embeddingValues = new ArrayList<>(byteArray.length);
+        for (byte value : byteArray) {
+            embeddingValues.add((float) value);
+        }
+
+        int remainingDimensions = dimensions;
+        while (remainingDimensions >= embeddingValues.size()) {
+            embedding.addAll(embeddingValues);
+            remainingDimensions -= embeddingValues.size();
         }
+        if (remainingDimensions > 0) {
+            embedding.addAll(embeddingValues.subList(0, remainingDimensions));
+        }
+
         return embedding;
     }
 
-    public record TestServiceSettings(String model, Integer dimensions, SimilarityMeasure similarity) implements ServiceSettings {
+    public record TestServiceSettings(
+        String model,
+        Integer dimensions,
+        SimilarityMeasure similarity,
+        DenseVectorFieldMapper.ElementType elementType
+    ) implements ServiceSettings {
 
         static final String NAME = "test_text_embedding_service_settings";
 
@@ -195,11 +234,26 @@ public static TestServiceSettings fromMap(Map<String, Object> map) {
                 similarity = SimilarityMeasure.fromString(similarityStr);
             }
 
-            return new TestServiceSettings(model, dimensions, similarity);
+            DenseVectorFieldMapper.ElementType elementType = null;
+            String elementTypeStr = (String) map.remove("element_type");
+            if (elementTypeStr != null) {
+                elementType = DenseVectorFieldMapper.ElementType.fromString(elementTypeStr);
+            }
+
+            if (validationException.validationErrors().isEmpty() == false) {
+                throw validationException;
+            }
+
+            return new TestServiceSettings(model, dimensions, similarity, elementType);
         }
 
         public TestServiceSettings(StreamInput in) throws IOException {
-            this(in.readString(), in.readOptionalInt(), in.readOptionalEnum(SimilarityMeasure.class));
+            this(
+                in.readString(),
+                in.readOptionalInt(),
+                in.readOptionalEnum(SimilarityMeasure.class),
+                in.readOptionalEnum(DenseVectorFieldMapper.ElementType.class)
+            );
         }
 
         @Override
@@ -210,6 +264,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             if (similarity != null) {
                 builder.field("similarity", similarity);
             }
+            if (elementType != null) {
+                builder.field("element_type", elementType);
+            }
             builder.endObject();
             return builder;
         }
@@ -229,22 +286,23 @@ public void writeTo(StreamOutput out) throws IOException {
             out.writeString(model);
             out.writeInt(dimensions);
             out.writeOptionalEnum(similarity);
+            out.writeOptionalEnum(elementType);
         }
 
         @Override
         public ToXContentObject getFilteredXContentObject() {
-            return (builder, params) -> {
-                builder.startObject();
-                builder.field("model", model);
-                builder.field("dimensions", dimensions);
-                if (similarity != null) {
-                    builder.field("similarity", similarity);
-                }
-                builder.endObject();
-                return builder;
-            };
+            return this;
         }
 
+        @Override
+        public SimilarityMeasure similarity() {
+            return similarity != null ? similarity : SimilarityMeasure.COSINE;
+        }
+
+        @Override
+        public DenseVectorFieldMapper.ElementType elementType() {
+            return elementType != null ? elementType : DenseVectorFieldMapper.ElementType.FLOAT;
elementType : DenseVectorFieldMapper.ElementType.FLOAT; + } } } diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 300c0d2c471dc..7046179d1fa71 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -45,7 +46,8 @@ public void setup() throws Exception { client(), randomIntBetween(1, 100), // dot product means that we need normalized vectors; it's not worth doing that in this test - randomValueOtherThan(SimilarityMeasure.DOT_PRODUCT, () -> randomFrom(SimilarityMeasure.values())) + randomValueOtherThan(SimilarityMeasure.DOT_PRODUCT, () -> randomFrom(SimilarityMeasure.values())), + randomFrom(DenseVectorFieldMapper.ElementType.values()) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java index 8ec614247bfbb..0c807c1166608 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.SimilarityMeasure; @@ -64,20 +65,37 @@ public record SemanticTextField(String fieldName, List originalValues, I static final String TASK_TYPE_FIELD = "task_type"; static final String DIMENSIONS_FIELD = "dimensions"; static final String SIMILARITY_FIELD = "similarity"; + static final String ELEMENT_TYPE_FIELD = "element_type"; public record InferenceResult(String inferenceId, ModelSettings modelSettings, List chunks) {} public record Chunk(String text, BytesReference rawEmbeddings) {} - public record ModelSettings(TaskType taskType, Integer dimensions, SimilarityMeasure similarity) implements ToXContentObject { + public record ModelSettings( + TaskType taskType, + Integer dimensions, + SimilarityMeasure similarity, + DenseVectorFieldMapper.ElementType elementType + ) implements ToXContentObject { public ModelSettings(Model model) { - this(model.getTaskType(), model.getServiceSettings().dimensions(), model.getServiceSettings().similarity()); + this( + model.getTaskType(), + model.getServiceSettings().dimensions(), + model.getServiceSettings().similarity(), + model.getServiceSettings().elementType() + ); } - 
public ModelSettings(TaskType taskType, Integer dimensions, SimilarityMeasure similarity) { + public ModelSettings( + TaskType taskType, + Integer dimensions, + SimilarityMeasure similarity, + DenseVectorFieldMapper.ElementType elementType + ) { this.taskType = Objects.requireNonNull(taskType, "task type must not be null"); this.dimensions = dimensions; this.similarity = similarity; + this.elementType = elementType; validate(); } @@ -91,6 +109,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (similarity != null) { builder.field(SIMILARITY_FIELD, similarity); } + if (elementType != null) { + builder.field(ELEMENT_TYPE_FIELD, elementType); + } return builder.endObject(); } @@ -104,6 +125,9 @@ public String toString() { if (similarity != null) { sb.append(", similarity=").append(similarity); } + if (elementType != null) { + sb.append(", element_type=").append(elementType); + } return sb.toString(); } @@ -112,10 +136,12 @@ private void validate() { case TEXT_EMBEDDING: validateFieldPresent(DIMENSIONS_FIELD, dimensions); validateFieldPresent(SIMILARITY_FIELD, similarity); + validateFieldPresent(ELEMENT_TYPE_FIELD, elementType); break; case SPARSE_EMBEDDING: validateFieldNotPresent(DIMENSIONS_FIELD, dimensions); validateFieldNotPresent(SIMILARITY_FIELD, similarity); + validateFieldNotPresent(ELEMENT_TYPE_FIELD, elementType); break; default: @@ -247,7 +273,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws TaskType taskType = TaskType.fromString((String) args[0]); Integer dimensions = (Integer) args[1]; SimilarityMeasure similarity = args[2] == null ? null : SimilarityMeasure.fromString((String) args[2]); - return new ModelSettings(taskType, dimensions, similarity); + DenseVectorFieldMapper.ElementType elementType = args[3] == null + ? 
null + : DenseVectorFieldMapper.ElementType.fromString((String) args[3]); + return new ModelSettings(taskType, dimensions, similarity, elementType); } ); @@ -273,6 +302,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws MODEL_SETTINGS_PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField(TASK_TYPE_FIELD)); MODEL_SETTINGS_PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), new ParseField(DIMENSIONS_FIELD)); MODEL_SETTINGS_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(SIMILARITY_FIELD)); + MODEL_SETTINGS_PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField(ELEMENT_TYPE_FIELD)); } /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 6cdbae9754f7d..84b41bf37db56 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -478,6 +478,7 @@ private static Mapper.Builder createEmbeddingsField(IndexVersion indexVersionCre CHUNKED_EMBEDDINGS_FIELD, indexVersionCreated ); + SimilarityMeasure similarity = modelSettings.similarity(); if (similarity != null) { switch (similarity) { @@ -490,6 +491,8 @@ private static Mapper.Builder createEmbeddingsField(IndexVersion indexVersionCre } } denseVectorMapperBuilder.dimensions(modelSettings.dimensions()); + denseVectorMapperBuilder.elementType(modelSettings.elementType()); + yield denseVectorMapperBuilder; } default -> throw new IllegalArgumentException("Invalid task_type in model_settings [" + modelSettings.taskType().name() + "]"); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index 4545327b62272..ec36040507ccd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; @@ -97,10 +98,15 @@ public static void storeSparseModel(Client client) throws Exception { storeModel(client, model); } - public static void storeDenseModel(Client client, int dimensions, SimilarityMeasure similarityMeasure) throws Exception { + public static void storeDenseModel( + Client client, + int dimensions, + SimilarityMeasure similarityMeasure, + DenseVectorFieldMapper.ElementType elementType + ) throws Exception { Model model = new TestDenseInferenceServiceExtension.TestDenseModel( TestDenseInferenceServiceExtension.TestInferenceService.NAME, - new TestDenseInferenceServiceExtension.TestServiceSettings("dense_model", dimensions, similarityMeasure) + new TestDenseInferenceServiceExtension.TestServiceSettings("dense_model", dimensions, similarityMeasure, elementType) ); storeModel(client, model); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 9af0a182b028a..14de5ceffa6d4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.vectors.SparseVectorFieldMapper; import org.elasticsearch.index.search.ESToParentBlockJoinQuery; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.LeafNestedDocuments; @@ -61,6 +62,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.function.BiConsumer; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED_EMBEDDINGS_FIELD; @@ -192,7 +194,7 @@ public void testDynamicUpdate() throws IOException { List.of(), new SemanticTextField.InferenceResult( inferenceId, - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null), + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null), List.of() ), XContentType.JSON @@ -280,6 +282,7 @@ public void testUpdateModelSettings() throws IOException { .field("task_type", "text_embedding") .field("dimensions", 10) .field("similarity", "cosine") + .field("element_type", "float") .endObject() .endObject() ) @@ -290,7 +293,7 @@ public void testUpdateModelSettings() throws IOException { containsString( "Cannot update parameter [model_settings] " + "from [task_type=sparse_embedding] " - + "to [task_type=text_embedding, dimensions=10, similarity=cosine]" + + "to [task_type=text_embedding, dimensions=10, similarity=cosine, element_type=float]" ) ); } @@ -449,7 +452,7 @@ public void testMissingInferenceId() throws IOException { source( b -> b.startObject("field") .startObject(INFERENCE_FIELD) - .field(MODEL_SETTINGS_FIELD, new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null)) + .field(MODEL_SETTINGS_FIELD, new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null)) .field(CHUNKS_FIELD, List.of()) .endObject() .endObject() @@ -491,6 +494,77 @@ public void testMissingTaskType() throws IOException { assertThat(ex.getCause().getMessage(), containsString("failed to parse field [model_settings]")); } + public void testDenseVectorElementType() throws IOException { + final String fieldName = "field"; + final String inferenceId = "test_service"; + + BiConsumer assertMapperService = (m, e) -> { + Mapper mapper = m.mappingLookup().getMapper(fieldName); + assertThat(mapper, instanceOf(SemanticTextFieldMapper.class)); + SemanticTextFieldMapper semanticTextFieldMapper = (SemanticTextFieldMapper) mapper; + assertThat(semanticTextFieldMapper.fieldType().getModelSettings().elementType(), equalTo(e)); + }; + + MapperService floatMapperService = mapperServiceForFieldWithModelSettings( + fieldName, + inferenceId, + new SemanticTextField.ModelSettings( + TaskType.TEXT_EMBEDDING, + 1024, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ) + ); + assertMapperService.accept(floatMapperService, DenseVectorFieldMapper.ElementType.FLOAT); + + 
MapperService byteMapperService = mapperServiceForFieldWithModelSettings( + fieldName, + inferenceId, + new SemanticTextField.ModelSettings( + TaskType.TEXT_EMBEDDING, + 1024, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.BYTE + ) + ); + assertMapperService.accept(byteMapperService, DenseVectorFieldMapper.ElementType.BYTE); + } + + private MapperService mapperServiceForFieldWithModelSettings( + String fieldName, + String inferenceId, + SemanticTextField.ModelSettings modelSettings + ) throws IOException { + MapperService mapperService = createMapperService(mapping(b -> {})); + mapperService.merge( + "_doc", + new CompressedXContent( + Strings.toString(PutMappingRequest.simpleMapping(fieldName, "type=semantic_text,inference_id=" + inferenceId)) + ), + MapperService.MergeReason.MAPPING_UPDATE + ); + + SemanticTextField semanticTextField = new SemanticTextField( + fieldName, + List.of(), + new SemanticTextField.InferenceResult(inferenceId, modelSettings, List.of()), + XContentType.JSON + ); + XContentBuilder builder = JsonXContent.contentBuilder().startObject(); + builder.field(semanticTextField.fieldName()); + builder.value(semanticTextField); + builder.endObject(); + + SourceToParse sourceToParse = new SourceToParse("test", BytesReference.bytes(builder), XContentType.JSON); + ParsedDocument parsedDocument = mapperService.documentMapper().parse(sourceToParse); + mapperService.merge( + "_doc", + parsedDocument.dynamicMappingsUpdate().toCompressedXContent(), + MapperService.MergeReason.MAPPING_UPDATE + ); + return mapperService; + } + private static void addSemanticTextMapping(XContentBuilder mappingBuilder, String fieldName, String modelId) throws IOException { mappingBuilder.startObject(fieldName); mappingBuilder.field("type", SemanticTextFieldMapper.CONTENT_TYPE); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index 6d8b3ab4fa28e..2a64f77e28756 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.SimilarityMeasure; @@ -106,36 +107,55 @@ protected boolean supportsUnknownFields() { public void testModelSettingsValidation() { NullPointerException npe = expectThrows(NullPointerException.class, () -> { - new SemanticTextField.ModelSettings(null, 10, SimilarityMeasure.COSINE); + new SemanticTextField.ModelSettings(null, 10, SimilarityMeasure.COSINE, DenseVectorFieldMapper.ElementType.FLOAT); }); assertThat(npe.getMessage(), equalTo("task type must not be null")); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings(TaskType.COMPLETION, 10, SimilarityMeasure.COSINE); + new SemanticTextField.ModelSettings( + TaskType.COMPLETION, + 10, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); }); assertThat(ex.getMessage(), containsString("Wrong [task_type]")); ex = 
expectThrows( IllegalArgumentException.class, - () -> { new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, 10, null); } + () -> { new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, 10, null, null); } ); assertThat(ex.getMessage(), containsString("[dimensions] is not allowed")); ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, SimilarityMeasure.COSINE); + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, SimilarityMeasure.COSINE, null); }); assertThat(ex.getMessage(), containsString("[similarity] is not allowed")); ex = expectThrows(IllegalArgumentException.class, () -> { - new SemanticTextField.ModelSettings(TaskType.TEXT_EMBEDDING, null, SimilarityMeasure.COSINE); + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, DenseVectorFieldMapper.ElementType.FLOAT); + }); + assertThat(ex.getMessage(), containsString("[element_type] is not allowed")); + + ex = expectThrows(IllegalArgumentException.class, () -> { + new SemanticTextField.ModelSettings( + TaskType.TEXT_EMBEDDING, + null, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ); }); assertThat(ex.getMessage(), containsString("required [dimensions] field is missing")); - ex = expectThrows( - IllegalArgumentException.class, - () -> { new SemanticTextField.ModelSettings(TaskType.TEXT_EMBEDDING, 10, null); } - ); + ex = expectThrows(IllegalArgumentException.class, () -> { + new SemanticTextField.ModelSettings(TaskType.TEXT_EMBEDDING, 10, null, DenseVectorFieldMapper.ElementType.FLOAT); + }); assertThat(ex.getMessage(), containsString("required [similarity] field is missing")); + + ex = expectThrows(IllegalArgumentException.class, () -> { + new SemanticTextField.ModelSettings(TaskType.TEXT_EMBEDDING, 10, SimilarityMeasure.COSINE, null); + }); + assertThat(ex.getMessage(), containsString("required [element_type] field is missing")); } public static InferenceChunkedTextEmbeddingFloatResults randomInferenceChunkedTextEmbeddingFloatResults( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java index c454bd6ff9ce6..094952b8716b7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -39,11 +40,12 @@ public static TestModel createRandomInstance() { public static TestModel createRandomInstance(TaskType taskType) { var dimensions = taskType == TaskType.TEXT_EMBEDDING ? randomInt(64) : null; var similarity = taskType == TaskType.TEXT_EMBEDDING ? randomFrom(SimilarityMeasure.values()) : null; + var elementType = taskType == TaskType.TEXT_EMBEDDING ? 
randomFrom(DenseVectorFieldMapper.ElementType.values()) : null; return new TestModel( randomAlphaOfLength(4), taskType, randomAlphaOfLength(10), - new TestModel.TestServiceSettings(randomAlphaOfLength(4), dimensions, similarity), + new TestModel.TestServiceSettings(randomAlphaOfLength(4), dimensions, similarity, elementType), new TestModel.TestTaskSettings(randomInt(3)), new TestModel.TestSecretSettings(randomAlphaOfLength(4)) ); @@ -78,7 +80,12 @@ public TestSecretSettings getSecretSettings() { return (TestSecretSettings) super.getSecretSettings(); } - public record TestServiceSettings(String model, Integer dimensions, SimilarityMeasure similarity) implements ServiceSettings { + public record TestServiceSettings( + String model, + Integer dimensions, + SimilarityMeasure similarity, + DenseVectorFieldMapper.ElementType elementType + ) implements ServiceSettings { private static final String NAME = "test_service_settings"; @@ -95,11 +102,16 @@ public static TestServiceSettings fromMap(Map map) { throw validationException; } - return new TestServiceSettings(model, null, null); + return new TestServiceSettings(model, null, null, null); } public TestServiceSettings(StreamInput in) throws IOException { - this(in.readString(), in.readOptionalVInt(), in.readOptionalEnum(SimilarityMeasure.class)); + this( + in.readString(), + in.readOptionalVInt(), + in.readOptionalEnum(SimilarityMeasure.class), + in.readOptionalEnum(DenseVectorFieldMapper.ElementType.class) + ); } @Override @@ -112,6 +124,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (similarity != null) { builder.field("similarity", similarity); } + if (elementType != null) { + builder.field("element_type", elementType); + } builder.endObject(); return builder; } @@ -131,6 +146,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(model); out.writeOptionalVInt(dimensions); out.writeOptionalEnum(similarity); + out.writeOptionalEnum(elementType); } @Override @@ -147,6 +163,11 @@ public SimilarityMeasure similarity() { public Integer dimensions() { return dimensions; } + + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return elementType; + } } public record TestTaskSettings(Integer temperature) implements TaskSettings { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index 07713952e36c3..12b3dede5217b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.KnnByteVectorQuery; import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -29,6 +30,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import 
org.elasticsearch.index.query.QueryRewriteContext; @@ -79,6 +81,7 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase expectedKnnQueryClass = switch (denseVectorElementType) { + case FLOAT -> KnnFloatVectorQuery.class; + case BYTE -> KnnByteVectorQuery.class; + }; + assertThat(innerQuery, instanceOf(expectedKnnQueryClass)); } private Query assertOuterBooleanQuery(Query query) { @@ -308,14 +317,18 @@ public void testSerializingQueryWhenNoInferenceId() throws IOException { assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class)); } - private static SourceToParse buildSemanticTextFieldWithInferenceResults(InferenceResultType inferenceResultType) throws IOException { + private static SourceToParse buildSemanticTextFieldWithInferenceResults( + InferenceResultType inferenceResultType, + DenseVectorFieldMapper.ElementType denseVectorElementType + ) throws IOException { SemanticTextField.ModelSettings modelSettings = switch (inferenceResultType) { case NONE -> null; - case SPARSE_EMBEDDING -> new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null); + case SPARSE_EMBEDDING -> new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null); case TEXT_EMBEDDING -> new SemanticTextField.ModelSettings( TaskType.TEXT_EMBEDDING, TEXT_EMBEDDING_DIMENSION_COUNT, - SimilarityMeasure.COSINE + SimilarityMeasure.COSINE, + denseVectorElementType ); }; diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml index 041dc05a8f5bb..d60667448d6a0 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml @@ -88,6 +88,7 @@ setup: task_type: text_embedding dimensions: 4 similarity: cosine + element_type: float chunks: - text: "these are not the droids you're looking for" embeddings: [0.04673296958208084, -0.03237321600317955, -0.02543032355606556, 0.056035321205854416] diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/20_semantic_text_field_mapping_incompatible_field_mapping.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/20_semantic_text_field_mapping_incompatible_field_mapping.yml index a7335728095a7..3d46c3b23d7e3 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/20_semantic_text_field_mapping_incompatible_field_mapping.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/20_semantic_text_field_mapping_incompatible_field_mapping.yml @@ -49,6 +49,7 @@ setup: task_type: text_embedding dimensions: 4 similarity: cosine + element_type: float chunks: - text: "these are not the droids you're looking for" embeddings: [0.04673296958208084, -0.03237321600317955, -0.02543032355606556, 0.056035321205854416] @@ -73,6 +74,7 @@ setup: task_type: text_embedding dimensions: 5 similarity: cosine + element_type: float chunks: - text: "other text" embeddings: [0.04673296958208084, -0.03237321600317955, -0.02543032355606556, 0.056035321205854416, 0.053438711911439896] @@ -94,6 +96,7 @@ setup: task_type: text_embedding dimensions: 4 similarity: cosine + element_type: float chunks: - text: "other text" embeddings: [0.04673296958208084, 
-0.03237321600317955, -0.02543032355606556, 0.056035321205854416] @@ -115,6 +118,29 @@ setup: task_type: text_embedding dimensions: 4 similarity: dot_product + element_type: float + chunks: + - text: "other text" + embeddings: [0.04673296958208084, -0.03237321600317955, -0.02543032355606556, 0.056035321205854416] + +--- +"Fails for non-compatible element type": + + - do: + catch: /Incompatible model settings for field \[dense_field\].+/ + index: + index: test-index + id: doc_2 + body: + dense_field: + text: "other text" + inference: + inference_id: dense-inference-id + model_settings: + task_type: text_embedding + dimensions: 4 + similarity: cosine + element_type: byte chunks: - text: "other text" embeddings: [0.04673296958208084, -0.03237321600317955, -0.02543032355606556, 0.056035321205854416] @@ -159,6 +185,7 @@ setup: task_type: text_embedding dimensions: 4 similarity: cosine + element_type: float chunks: - text: "these are not the droids you're looking for" embeddings: [0.04673296958208084, -0.03237321600317955, -0.02543032355606556, 0.056035321205854416] @@ -180,6 +207,7 @@ setup: task_type: text_embedding dimensions: 4 similarity: cosine + element_type: float chunks: - text: "these are not the droids you're looking for" @@ -218,6 +246,7 @@ setup: task_type: text_embedding dimensions: 4 similarity: cosine + element_type: float chunks: - embeddings: [ 0.04673296958208084, -0.03237321600317955, -0.02543032355606556, 0.056035321205854416 ] diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 8fffa7fa8c7ef..5ee7a943c4d35 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -121,6 +121,67 @@ setup: - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } +--- +"Query using a dense embedding model that uses byte embeddings": + - skip: + features: [ "headers", "close_to" ] + + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-byte-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "api_key": "abc64", + "similarity": "COSINE", + "element_type": "byte" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-dense-byte-index + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-byte-id + non_inference_field: + type: text + + - do: + index: + index: test-dense-byte-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-byte-index + body: + query: + semantic: + field: "inference_field" + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + - close_to: { hits.hits.0._score: { value: 1.0, error: 0.0001 } } + - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } + --- "Apply boost and query name": - skip: From 
3d02d670fa2a905e58a51c2ca5385f140a537b81 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Fri, 28 Jun 2024 13:18:13 -0400 Subject: [PATCH 048/216] Adjust Dense Vector Unit Vector Epsilon (#110240) Change dense vector unit vector epsilon to 1e-3 --- .../index/mapper/vectors/DenseVectorFieldMapper.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 44f49fb6c7966..989c92e909ce2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -96,7 +96,7 @@ */ public class DenseVectorFieldMapper extends FieldMapper { public static final String COSINE_MAGNITUDE_FIELD_SUFFIX = "._magnitude"; - private static final float EPS = 1e-4f; + private static final float EPS = 1e-3f; static boolean isNotUnitVector(float magnitude) { return Math.abs(magnitude - 1.0f) > EPS; From 40386d504ab11e2422ec22fbfdbe629f92693de6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:04:51 +1000 Subject: [PATCH 049/216] Mute org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests testReplaceChildren {class org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec} #110272 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f4a0695b6f19e..15baa7a4fdbe0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -85,6 +85,9 @@ tests: - class: org.elasticsearch.compute.lucene.ValueSourceReaderTypeConversionTests method: testLoadAll issue: https://github.com/elastic/elasticsearch/issues/110244 +- class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests + method: testReplaceChildren {class org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec} + issue: https://github.com/elastic/elasticsearch/issues/110272 # Examples: # From ac740e4674c56ea84169a1ef552046183ea936e7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:04:56 +1000 Subject: [PATCH 050/216] Mute org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not supported} #110290 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 15baa7a4fdbe0..311637bf54661 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -88,6 +88,9 @@ tests: - class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests method: testReplaceChildren {class org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec} issue: https://github.com/elastic/elasticsearch/issues/110272 +- class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT + method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not supported} + issue: https://github.com/elastic/elasticsearch/issues/110290 # Examples: # From ed3ea2dce35eea25b98c6f7f56a9728ee7ef6b7d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:05:02 +1000 Subject: [PATCH 051/216] Mute org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT test {yaml=painless/146_dense_vector_bit_basic/Dot Product is not supported} #110291 --- muted-tests.yml | 3 
+++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 311637bf54661..1c4ab79c12144 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -91,6 +91,9 @@ tests: - class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not supported} issue: https://github.com/elastic/elasticsearch/issues/110290 +- class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT + method: test {yaml=painless/146_dense_vector_bit_basic/Dot Product is not supported} + issue: https://github.com/elastic/elasticsearch/issues/110291 # Examples: # From c8b67dcac9809b02fdc06fd96721628c7d86ef6e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:05:05 +1000 Subject: [PATCH 052/216] Mute org.elasticsearch.action.search.SearchProgressActionListenerIT testSearchProgressWithQuery #109867 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 1c4ab79c12144..24286f2c2952f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -94,6 +94,9 @@ tests: - class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT method: test {yaml=painless/146_dense_vector_bit_basic/Dot Product is not supported} issue: https://github.com/elastic/elasticsearch/issues/110291 +- class: org.elasticsearch.action.search.SearchProgressActionListenerIT + method: testSearchProgressWithQuery + issue: https://github.com/elastic/elasticsearch/issues/109867 # Examples: # From fd2c0a85f0175f8348aa8e164e92aba18d38d605 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:05:10 +1000 Subject: [PATCH 053/216] Mute org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT testMinVersionAsNewVersion #95384 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 24286f2c2952f..7da08dbe4e846 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -97,6 +97,9 @@ tests: - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithQuery issue: https://github.com/elastic/elasticsearch/issues/109867 +- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT + method: testMinVersionAsNewVersion + issue: https://github.com/elastic/elasticsearch/issues/95384 # Examples: # From 7b5bc1436029f7639bc43fe7972fa458df46ba47 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:05:12 +1000 Subject: [PATCH 054/216] Mute org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT testCcsMinimizeRoundtripsIsFalse #101974 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 7da08dbe4e846..62b1e55728eb5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -100,6 +100,9 @@ tests: - class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT method: testMinVersionAsNewVersion issue: https://github.com/elastic/elasticsearch/issues/95384 +- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT + method: testCcsMinimizeRoundtripsIsFalse + issue: https://github.com/elastic/elasticsearch/issues/101974 # Examples: # From 885ce77efc45e1fecb85a468a6794ddcfeb40038 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:05:15 +1000 Subject: [PATCH 055/216] Mute org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT testMinVersionAsOldVersion #109454 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 62b1e55728eb5..e8c4dab5ff2ee 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -103,6 +103,9 @@ tests: - class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT method: testCcsMinimizeRoundtripsIsFalse issue: https://github.com/elastic/elasticsearch/issues/101974 +- class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT + method: testMinVersionAsOldVersion + issue: https://github.com/elastic/elasticsearch/issues/109454 # Examples: # From bbcc13feb5da79c60c9074f0c8d4de37c42f86a3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:05:21 +1000 Subject: [PATCH 056/216] Mute org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests testInfoParameters {class org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec} #110292 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index e8c4dab5ff2ee..f29a1ae2c73db 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -106,6 +106,9 @@ tests: - class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT method: testMinVersionAsOldVersion issue: https://github.com/elastic/elasticsearch/issues/109454 +- class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests + method: testInfoParameters {class org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec} + issue: https://github.com/elastic/elasticsearch/issues/110292 # Examples: # From 11a75c003532768771b59814329a4199213c2f26 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 29 Jun 2024 04:05:27 +1000 Subject: [PATCH 057/216] Mute org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests testInfoParameters {class org.elasticsearch.xpack.esql.plan.physical.ExchangeExec} #110293 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f29a1ae2c73db..6d3d060a51bfd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -109,6 +109,9 @@ tests: - class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests method: testInfoParameters {class org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec} issue: https://github.com/elastic/elasticsearch/issues/110292 +- class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests + method: testInfoParameters {class org.elasticsearch.xpack.esql.plan.physical.ExchangeExec} + issue: https://github.com/elastic/elasticsearch/issues/110293 # Examples: # From 7cf2090e9a69fd9374a9ba54ed47fb5b529ebdda Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Fri, 28 Jun 2024 15:35:39 -0400 Subject: [PATCH 058/216] TransportSimulateIndexTemplateAction creates a temporary cluster state that needs to set eventIngestedRange according to minTransportVersion (#110280) TransportSimulateIndexTemplateAction creates a temporary cluster state that needs to set eventIngestedRange according to minTransportVersion in order to pass mixed-cluster state tests. 
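In short, the temporary index metadata is now built along these lines (a sketch using the names from the diff below, not the verbatim change):

```java
// Recover the real index's event.ingested range when the index already exists
// in the simulated cluster state; otherwise fall back to NO_SHARDS.
IndexLongFieldRange eventIngestedRange = indexMetadata == null
    ? IndexLongFieldRange.NO_SHARDS
    : indexMetadata.getEventIngestedRange();

// Passing minTransportVersion lets the IndexMetadata builder reset the range
// to UNKNOWN when some node in the cluster is on an older version.
IndexMetadata dummyIndexMetadata = IndexMetadata.builder(indexName)
    .eventIngestedRange(eventIngestedRange, simulatedState.getMinTransportVersion())
    .settings(dummySettings)
    .build();
```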
Fixes #110252 --- .../TransportSimulateIndexTemplateAction.java | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 253f02d30465a..eb90e95cb08be 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.tasks.Task; @@ -214,8 +215,12 @@ public static ClusterState resolveTemporaryState( .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) .build(); - final IndexMetadata indexMetadata = IndexMetadata.builder(indexName).settings(dummySettings).build(); + final IndexMetadata indexMetadata = IndexMetadata.builder(indexName) + // handle mixed-cluster states by passing in minTransportVersion to reset event.ingested range to UNKNOWN if an older version + .eventIngestedRange(getEventIngestedRange(indexName, simulatedState), simulatedState.getMinTransportVersion()) + .settings(dummySettings) + .build(); return ClusterState.builder(simulatedState) .metadata(Metadata.builder(simulatedState.metadata()).put(indexMetadata, true).build()) .build(); @@ -279,7 +284,11 @@ public static Template resolveTemplate( // Then apply settings resolved from templates: dummySettings.put(templateSettings); - final IndexMetadata indexMetadata = IndexMetadata.builder(indexName).settings(dummySettings).build(); + final IndexMetadata indexMetadata = IndexMetadata.builder(indexName) + // handle mixed-cluster states by passing in minTransportVersion to reset event.ingested range to UNKNOWN if an older version + .eventIngestedRange(getEventIngestedRange(indexName, simulatedState), simulatedState.getMinTransportVersion()) + .settings(dummySettings) + .build(); final ClusterState tempClusterState = ClusterState.builder(simulatedState) .metadata(Metadata.builder(simulatedState.metadata()).put(indexMetadata, true).build()) @@ -321,4 +330,9 @@ public static Template resolveTemplate( } return new Template(settings, mergedMapping, aliasesByName, lifecycle); } + + private static IndexLongFieldRange getEventIngestedRange(String indexName, ClusterState simulatedState) { + final IndexMetadata indexMetadata = simulatedState.metadata().index(indexName); + return indexMetadata == null ? 
IndexLongFieldRange.NO_SHARDS : indexMetadata.getEventIngestedRange(); + } } From 45a517ac177587a36179af1847e3c91cf932f1a1 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Fri, 28 Jun 2024 15:51:14 -0400 Subject: [PATCH 059/216] Fix Semantic Text Test Failures (#110294) Fix semantic text test failures caused by the concurrent addition of DenseVectorFieldMapper.ElementType.BIT and #110010 --- .../inference/mock/TestDenseInferenceServiceExtension.java | 6 ++++++ .../action/filter/ShardBulkInferenceActionFilterIT.java | 3 ++- .../xpack/inference/queries/SemanticQueryBuilderTests.java | 6 +++++- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index d455b564b32d5..c225f94694c01 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -215,6 +215,12 @@ public record TestServiceSettings( static final String NAME = "test_text_embedding_service_settings"; + public TestServiceSettings { + if (elementType == DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("Test dense inference service does not yet support element type BIT"); + } + } + public static TestServiceSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 7046179d1fa71..8da1aaabd517a 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -47,7 +47,8 @@ public void setup() throws Exception { randomIntBetween(1, 100), // dot product means that we need normalized vectors; it's not worth doing that in this test randomValueOtherThan(SimilarityMeasure.DOT_PRODUCT, () -> randomFrom(SimilarityMeasure.values())), - randomFrom(DenseVectorFieldMapper.ElementType.values()) + // TODO: Allow element type BIT once TestDenseInferenceServiceExtension supports it + randomValueOtherThan(DenseVectorFieldMapper.ElementType.BIT, () -> randomFrom(DenseVectorFieldMapper.ElementType.values())) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index 12b3dede5217b..c2b99923bae61 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -96,7 +96,10 @@ public static void setInferenceResultType() { // These are class variables because they are used when 
initializing additional mappings, which happens once per test suite run in // AbstractBuilderTestCase#beforeTest as part of service holder creation. inferenceResultType = randomFrom(InferenceResultType.values()); - denseVectorElementType = randomFrom(DenseVectorFieldMapper.ElementType.values()); + denseVectorElementType = randomValueOtherThan( + DenseVectorFieldMapper.ElementType.BIT, + () -> randomFrom(DenseVectorFieldMapper.ElementType.values()) + ); // TODO: Support bit elements once KNN bit vector queries are available } @Override @@ -202,6 +205,7 @@ private void assertTextEmbeddingLuceneQuery(Query query) { Class expectedKnnQueryClass = switch (denseVectorElementType) { case FLOAT -> KnnFloatVectorQuery.class; case BYTE -> KnnByteVectorQuery.class; + default -> throw new IllegalStateException("Unhandled element type [" + denseVectorElementType + "]"); }; assertThat(innerQuery, instanceOf(expectedKnnQueryClass)); } From 1d08732750f5a5c3664e55cc7b6c60a05d41795d Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Fri, 28 Jun 2024 16:25:16 -0400 Subject: [PATCH 060/216] Add text similarity reranker retriever (#109813) * Add text similarity rerank builder and context * Add min_score support * Add tests * Filter by min_score + rename rank_window_size * Add transport version * Add doc + unit test * Implement equals and hashcode, use constant * Use optional float for min_score * Implement explainHit * Extract TS reranking to its own plugin * Fix description * Move field name * Add featurespec, fix build * Fix dir * Add XContent parsing, clean up build.gradle * Add license check * Spotless * Remove clusterModules from gradle config * Fix multi-node tests * Simplify * Move feature into inference plugin * Add Javadoc * Update docs/changelog/109813.yaml * Remove tracking of unnecessary sections * Hard failure on doc count != score count * Remove fromXContent parsing * Fix leak in tests * Add failure test * Remove minscore tracking * Put back node feature * Switch from platinum to enterprise license * Encapsulate min_score filtering in subclass * Add test case for inference response mismatch * Spotless * Add serialization test * Switch retriever builder to ConstructingObjectParser * Add retriever serialization test * Add yaml test * Exception if nesting other rank builder * Fix float rounding error * Fix multi-node tests * Set trial license for yaml tests * Spotless * Fix trial license setting for yaml tests * Update yaml test texts * Consolidate tests * Add random nulls and parsing tests with defaults * Update feature req in yaml test * Rename preprocess method * Spotless --- docs/changelog/109813.yaml | 5 + .../MockedRequestActionBasedRerankerIT.java | 48 ++- .../org/elasticsearch/TransportVersions.java | 1 + ...ankFeaturePhaseRankCoordinatorContext.java | 39 ++- .../rank/rerank/AbstractRerankerIT.java | 8 +- .../core/src/main/java/module-info.java | 4 +- .../elasticsearch/xpack/core/XPackField.java | 2 +- .../mock/TestInferenceServicePlugin.java | 5 + .../mock/TestRerankingServiceExtension.java | 195 ++++++++++++ ...search.inference.InferenceServiceExtension | 1 + .../inference/src/main/java/module-info.java | 2 + .../xpack/inference/InferenceFeatures.java | 26 ++ .../xpack/inference/InferencePlugin.java | 16 +- .../TextSimilarityRankBuilder.java | 215 +++++++++++++ ...ankFeaturePhaseRankCoordinatorContext.java | 112 +++++++ .../TextSimilarityRankRetrieverBuilder.java | 155 ++++++++++ ...lasticsearch.features.FeatureSpecification | 8 + .../TextSimilarityRankBuilderTests.java | 94 ++++++ 
...aturePhaseRankCoordinatorContextTests.java | 65 ++++ .../TextSimilarityRankMultiNodeTests.java | 45 +++ ...xtSimilarityRankRetrieverBuilderTests.java | 107 +++++++ .../TextSimilarityRankTests.java | 173 +++++++++++ .../TextSimilarityTestPlugin.java | 287 ++++++++++++++++++ .../xpack/inference/InferenceRestIT.java | 1 + .../70_text_similarity_rank_retriever.yml | 90 ++++++ 25 files changed, 1674 insertions(+), 30 deletions(-) create mode 100644 docs/changelog/109813.yaml create mode 100644 x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java create mode 100644 x-pack/plugin/inference/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilderTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java create mode 100644 x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml diff --git a/docs/changelog/109813.yaml b/docs/changelog/109813.yaml new file mode 100644 index 0000000000000..edcef17e87606 --- /dev/null +++ b/docs/changelog/109813.yaml @@ -0,0 +1,5 @@ +pr: 109813 +summary: Add text similarity reranker retriever +area: Ranking +type: feature +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java index 32ce485db5727..0d6d17cbaeb1f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java @@ -65,15 +65,23 @@ public class MockedRequestActionBasedRerankerIT extends AbstractRerankerIT { private static final String inferenceId = "inference-id"; private static final String inferenceText = "inference-text"; + private static final float minScore = 0.0f; @Override protected RankBuilder getRankBuilder(int rankWindowSize, String rankFeatureField) { - return new 
MockRequestActionBasedRankBuilder(rankWindowSize, rankFeatureField, inferenceId, inferenceText); + return new MockRequestActionBasedRankBuilder(rankWindowSize, rankFeatureField, inferenceId, inferenceText, minScore); } @Override protected RankBuilder getThrowingRankBuilder(int rankWindowSize, String rankFeatureField, ThrowingRankBuilderType type) { - return new ThrowingMockRequestActionBasedRankBuilder(rankWindowSize, rankFeatureField, inferenceId, inferenceText, type.name()); + return new ThrowingMockRequestActionBasedRankBuilder( + rankWindowSize, + rankFeatureField, + inferenceId, + inferenceText, + minScore, + type.name() + ); } @Override @@ -237,7 +245,8 @@ public static class TestRerankingRankFeaturePhaseRankCoordinatorContext extends int windowSize, Client client, String inferenceId, - String inferenceText + String inferenceText, + float minScore ) { super(size, from, windowSize); this.client = client; @@ -288,6 +297,7 @@ public static class MockRequestActionBasedRankBuilder extends RankBuilder { public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField INFERENCE_ID = new ParseField("inference_id"); public static final ParseField INFERENCE_TEXT = new ParseField("inference_text"); + public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score"); static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "request_action_based_rank", args -> { @@ -298,7 +308,8 @@ public static class MockRequestActionBasedRankBuilder extends RankBuilder { } final String inferenceId = (String) args[2]; final String inferenceText = (String) args[3]; - return new MockRequestActionBasedRankBuilder(rankWindowSize, field, inferenceId, inferenceText); + final float minScore = (float) args[4]; + return new MockRequestActionBasedRankBuilder(rankWindowSize, field, inferenceId, inferenceText, minScore); } ); @@ -312,6 +323,7 @@ public static class MockRequestActionBasedRankBuilder extends RankBuilder { protected final String field; protected final String inferenceId; protected final String inferenceText; + protected final float minScore; public static MockRequestActionBasedRankBuilder fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); @@ -321,12 +333,14 @@ public MockRequestActionBasedRankBuilder( final int rankWindowSize, final String field, final String inferenceId, - final String inferenceText + final String inferenceText, + final float minScore ) { super(rankWindowSize); this.field = field; this.inferenceId = inferenceId; this.inferenceText = inferenceText; + this.minScore = minScore; } public MockRequestActionBasedRankBuilder(StreamInput in) throws IOException { @@ -334,6 +348,7 @@ public MockRequestActionBasedRankBuilder(StreamInput in) throws IOException { this.field = in.readString(); this.inferenceId = in.readString(); this.inferenceText = in.readString(); + this.minScore = in.readFloat(); } @Override @@ -341,6 +356,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(field); out.writeString(inferenceId); out.writeString(inferenceText); + out.writeFloat(minScore); } @Override @@ -348,6 +364,7 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.field(FIELD_FIELD.getPreferredName(), field); builder.field(INFERENCE_ID.getPreferredName(), inferenceId); builder.field(INFERENCE_TEXT.getPreferredName(), inferenceText); + builder.field(MIN_SCORE_FIELD.getPreferredName(), minScore); } @Override @@ -383,7 +400,8 @@ public 
RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorCo rankWindowSize(), client, inferenceId, - inferenceText + inferenceText, + minScore ); } @@ -425,8 +443,16 @@ public static class ThrowingMockRequestActionBasedRankBuilder extends MockReques } final String inferenceId = (String) args[2]; final String inferenceText = (String) args[3]; - String throwingType = (String) args[4]; - return new ThrowingMockRequestActionBasedRankBuilder(rankWindowSize, field, inferenceId, inferenceText, throwingType); + final float minScore = (float) args[4]; + String throwingType = (String) args[5]; + return new ThrowingMockRequestActionBasedRankBuilder( + rankWindowSize, + field, + inferenceId, + inferenceText, + minScore, + throwingType + ); } ); @@ -449,9 +475,10 @@ public ThrowingMockRequestActionBasedRankBuilder( final String field, final String inferenceId, final String inferenceText, + final float minScore, final String throwingType ) { - super(rankWindowSize, field, inferenceId, inferenceText); + super(rankWindowSize, field, inferenceId, inferenceText, minScore); this.throwingRankBuilderType = ThrowingRankBuilderType.valueOf(throwingType); } @@ -526,7 +553,8 @@ public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorCo rankWindowSize(), client, inferenceId, - inferenceText + inferenceText, + minScore ) { @Override protected TestRerankingActionRequest generateRequest(List docFeatures) { diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 9c9d16032ba95..ae70bb7cbf0a8 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -205,6 +205,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_ADD_AGGREGATE_TYPE = def(8_696_00_0); public static final TransportVersion SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED = def(8_697_00_0); public static final TransportVersion K_FOR_KNN_QUERY_ADDED = def(8_698_00_0); + public static final TransportVersion TEXT_SIMILARITY_RERANKER_RETRIEVER = def(8_699_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankCoordinatorContext.java b/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankCoordinatorContext.java index 915feaad6e339..02834f03f54ab 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankCoordinatorContext.java +++ b/server/src/main/java/org/elasticsearch/search/rank/context/RankFeaturePhaseRankCoordinatorContext.java @@ -24,7 +24,8 @@ /** * {@code RankFeaturePhaseRankCoordinatorContext} is a base class that runs on the coordinating node and is responsible for retrieving - * {@code window_size} total results from all shards, rank them, and then produce a final paginated response of [from, from+size] results. + * {@code rank_window_size} total results from all shards, rank them, and then produce a final paginated response of [from, from+size] + * results. */ public abstract class RankFeaturePhaseRankCoordinatorContext { @@ -44,6 +45,16 @@ public RankFeaturePhaseRankCoordinatorContext(int size, int from, int rankWindow */ protected abstract void computeScores(RankFeatureDoc[] featureDocs, ActionListener scoreListener); + /** + * Preprocesses the provided documents: sorts them by score descending. 
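+     * Subclasses may override this hook to apply additional preprocessing, for example dropping documents below a minimum score,
+     * before the results are paginated.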
+ * @param originalDocs documents to process + */ + protected RankFeatureDoc[] preprocess(RankFeatureDoc[] originalDocs) { + return Arrays.stream(originalDocs) + .sorted(Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()) + .toArray(RankFeatureDoc[]::new); + } + /** * This method is responsible for ranking the global results based on the provided rank feature results from each shard. *
    @@ -63,22 +74,28 @@ public void computeRankScoresForGlobalResults( RankFeatureDoc[] featureDocs = extractFeatureDocs(rankSearchResults); // generate the final `topResults` results, and pass them to fetch phase through the `rankListener` - computeScores(featureDocs, rankListener.delegateFailureAndWrap((listener, scores) -> { - for (int i = 0; i < featureDocs.length; i++) { - featureDocs[i].score = scores[i]; - } - listener.onResponse(featureDocs); - })); + if (featureDocs.length == 0) { + rankListener.onResponse(new RankFeatureDoc[0]); + } else { + computeScores(featureDocs, rankListener.delegateFailureAndWrap((listener, scores) -> { + for (int i = 0; i < featureDocs.length; i++) { + featureDocs[i].score = scores[i]; + } + listener.onResponse(featureDocs); + })); + } } /** - * Ranks the provided {@link RankFeatureDoc} array and paginates the results based on the `from` and `size` parameters. + * Ranks the provided {@link RankFeatureDoc} array and paginates the results based on the `from` and `size` parameters. Filters out + * documents that have a relevance score less than min_score. + * @param rankFeatureDocs documents to process */ public RankFeatureDoc[] rankAndPaginate(RankFeatureDoc[] rankFeatureDocs) { - Arrays.sort(rankFeatureDocs, Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()); - RankFeatureDoc[] topResults = new RankFeatureDoc[Math.max(0, Math.min(size, rankFeatureDocs.length - from))]; + RankFeatureDoc[] sortedDocs = preprocess(rankFeatureDocs); + RankFeatureDoc[] topResults = new RankFeatureDoc[Math.max(0, Math.min(size, sortedDocs.length - from))]; for (int rank = 0; rank < topResults.length; ++rank) { - topResults[rank] = rankFeatureDocs[from + rank]; + topResults[rank] = sortedDocs[from + rank]; topResults[rank].rank = from + rank + 1; } return topResults; diff --git a/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java b/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java index ae5f0329390d0..13a045d8a4654 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java +++ b/test/framework/src/main/java/org/elasticsearch/search/rank/rerank/AbstractRerankerIT.java @@ -42,7 +42,7 @@ @ESIntegTestCase.ClusterScope(minNumDataNodes = 3) public abstract class AbstractRerankerIT extends ESIntegTestCase { - protected enum ThrowingRankBuilderType { + public enum ThrowingRankBuilderType { THROWING_QUERY_PHASE_SHARD_CONTEXT, THROWING_QUERY_PHASE_COORDINATOR_CONTEXT, THROWING_RANK_FEATURE_PHASE_SHARD_CONTEXT, @@ -94,7 +94,7 @@ public void testRerankerNoExceptions() throws Exception { int rank = 1; for (SearchHit searchHit : response.getHits().getHits()) { assertThat(searchHit, hasId(String.valueOf(5 - (rank - 1)))); - assertEquals(searchHit.getScore(), (0.5f - ((rank - 1) * 0.1f)), 1e-5f); + assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); assertThat(searchHit, hasRank(rank)); assertNotNull(searchHit.getFields().get(searchField)); rank++; @@ -139,7 +139,7 @@ public void testRerankerPagination() throws Exception { int rank = 3; for (SearchHit searchHit : response.getHits().getHits()) { assertThat(searchHit, hasId(String.valueOf(5 - (rank - 1)))); - assertEquals(searchHit.getScore(), (0.5f - ((rank - 1) * 0.1f)), 1e-5f); + assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); assertThat(searchHit, hasRank(rank)); assertNotNull(searchHit.getFields().get(searchField)); rank++; @@ -221,7 +221,7 @@ public void 
testNotAllShardsArePresentInFetchPhase() throws Exception { int rank = 1; for (SearchHit searchHit : response.getHits().getHits()) { assertThat(searchHit, hasId(String.valueOf(5 - (rank - 1)))); - assertEquals(searchHit.getScore(), (0.5f - ((rank - 1) * 0.1f)), 1e-5f); + assertEquals(0.5f - ((rank - 1) * 0.1f), searchHit.getScore(), 1e-5f); assertThat(searchHit, hasRank(rank)); assertNotNull(searchHit.getFields().get(searchField)); rank++; diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index a37946200a47d..282072417875b 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -119,9 +119,11 @@ exports org.elasticsearch.xpack.core.ml.job.process.autodetect.state; exports org.elasticsearch.xpack.core.ml.job.results; exports org.elasticsearch.xpack.core.ml.job.snapshot.upgrade; + exports org.elasticsearch.xpack.core.ml.ltr; exports org.elasticsearch.xpack.core.ml.notifications; exports org.elasticsearch.xpack.core.ml.packageloader.action; exports org.elasticsearch.xpack.core.ml.process.writer; + exports org.elasticsearch.xpack.core.ml.search; exports org.elasticsearch.xpack.core.ml.stats; exports org.elasticsearch.xpack.core.ml.utils.time; exports org.elasticsearch.xpack.core.ml.utils; @@ -227,8 +229,6 @@ exports org.elasticsearch.xpack.core.watcher.trigger; exports org.elasticsearch.xpack.core.watcher.watch; exports org.elasticsearch.xpack.core.watcher; - exports org.elasticsearch.xpack.core.ml.ltr; - exports org.elasticsearch.xpack.core.ml.search; provides org.elasticsearch.action.admin.cluster.node.info.ComponentVersionNumber with diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index 801ef2c463e95..4ed2e2a8e056c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -87,7 +87,7 @@ public final class XPackField { /** Name constant for the redact processor feature. */ public static final String REDACT_PROCESSOR = "redact_processor"; - /* Name for Universal Profiling. */ + /** Name for Universal Profiling. 
*/ public static final String UNIVERSAL_PROFILING = "universal_profiling"; private XPackField() {} diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java index 6460b06f13800..752472b90374b 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java @@ -39,6 +39,11 @@ public List getNamedWriteables() { ServiceSettings.class, TestSparseInferenceServiceExtension.TestServiceSettings.NAME, TestSparseInferenceServiceExtension.TestServiceSettings::new + ), + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + TestRerankingServiceExtension.TestServiceSettings.NAME, + TestRerankingServiceExtension.TestServiceSettings::new ) ); } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java new file mode 100644 index 0000000000000..b2f3b6f774a6f --- /dev/null +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.mock; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class TestRerankingServiceExtension implements InferenceServiceExtension { + @Override + public List getInferenceServiceFactories() { + return List.of(TestInferenceService::new); + } + + public static class TestRerankingModel extends Model { + public TestRerankingModel(String inferenceEntityId, TestServiceSettings serviceSettings) { + super( + new ModelConfigurations(inferenceEntityId, TaskType.RERANK, TestInferenceService.NAME, serviceSettings), + new ModelSecrets(new AbstractTestInferenceService.TestSecretSettings("api_key")) + ); + } + } + + public static class TestInferenceService extends AbstractTestInferenceService { + public static final String NAME = "test_reranking_service"; + + public TestInferenceService(InferenceServiceFactoryContext context) {} + + @Override + public String name() { + return NAME; + } + + @Override + @SuppressWarnings("unchecked") + public void parseRequestConfig( + String modelId, + TaskType taskType, + Map config, + Set platformArchitectures, + ActionListener parsedModelListener + ) { + var serviceSettingsMap = (Map) config.remove(ModelConfigurations.SERVICE_SETTINGS); + var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); + var secretSettings = TestSecretSettings.fromMap(serviceSettingsMap); + + var taskSettingsMap = getTaskSettingsMap(config); + var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); + + parsedModelListener.onResponse(new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings)); + } + + @Override + public void infer( + Model model, + @Nullable String query, + List input, + Map taskSettings, + InputType inputType, + TimeValue timeout, + ActionListener listener + ) { + switch (model.getConfigurations().getTaskType()) { + case ANY, RERANK -> listener.onResponse(makeResults(input)); + default -> listener.onFailure( + new ElasticsearchStatusException( + TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()), + RestStatus.BAD_REQUEST + ) + ); + } + } + + @Override + public void chunkedInfer( + Model model, + @Nullable String query, + List input, + Map taskSettings, + InputType inputType, + ChunkingOptions chunkingOptions, + 
TimeValue timeout, + ActionListener> listener + ) { + listener.onFailure( + new ElasticsearchStatusException( + TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()), + RestStatus.BAD_REQUEST + ) + ); + } + + private RankedDocsResults makeResults(List input) { + List results = new ArrayList<>(); + int totalResults = input.size(); + float resultDiff = 0.2f; + for (int i = 0; i < input.size(); i++) { + results.add(new RankedDocsResults.RankedDoc(totalResults - 1 - i, resultDiff * (totalResults - i), input.get(i))); + } + return new RankedDocsResults(results); + } + + protected ServiceSettings getServiceSettingsFromMap(Map serviceSettingsMap) { + return TestServiceSettings.fromMap(serviceSettingsMap); + } + } + + public record TestServiceSettings(String model_id) implements ServiceSettings { + + static final String NAME = "test_reranking_service_settings"; + + public static TestServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String model = (String) map.remove("model_id"); + + if (model == null) { + validationException.addValidationError("missing model"); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new TestServiceSettings(model); + } + + public TestServiceSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("model_id", model_id); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(model_id); + } + + @Override + public ToXContentObject getFilteredXContentObject() { + return (builder, params) -> { + builder.startObject(); + builder.field("model_id", model_id); + builder.endObject(); + return builder; + }; + } + } +} diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension b/x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension index c1908dc788251..690168b538fb9 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension @@ -1,2 +1,3 @@ org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension +org.elasticsearch.xpack.inference.mock.TestRerankingServiceExtension diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 183d41bf730fe..aa907a236884a 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -28,4 +28,6 @@ exports org.elasticsearch.xpack.inference.rest; exports org.elasticsearch.xpack.inference.services; exports org.elasticsearch.xpack.inference; + + provides 
org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.inference.InferenceFeatures; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java new file mode 100644 index 0000000000000..4cc7f5b502ba9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; + +import java.util.Set; + +/** + * Provides inference features. + */ +public class InferenceFeatures implements FeatureSpecification { + + @Override + public Set getFeatures() { + return Set.of(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index c8fb7e94a19ab..8ab9f774898b6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -34,8 +34,10 @@ import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; @@ -59,6 +61,8 @@ import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; +import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankBuilder; +import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceEndpointAction; import org.elasticsearch.xpack.inference.rest.RestGetInferenceDiagnosticsAction; @@ -110,6 +114,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; + private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); @@ -210,8 +215,8 @@ public List getInferenceServiceFactories() { @Override public List getNamedWriteables() { - var entries = new ArrayList(); - entries.addAll(InferenceNamedWriteablesProvider.getNamedWriteables()); + var entries 
= new ArrayList<>(InferenceNamedWriteablesProvider.getNamedWriteables()); + entries.add(new NamedWriteableRegistry.Entry(RankBuilder.class, TextSimilarityRankBuilder.NAME, TextSimilarityRankBuilder::new)); return entries; } @@ -309,4 +314,11 @@ public List> getQueries() { } return List.of(); } + + @Override + public List> getRetrievers() { + return List.of( + new RetrieverSpec<>(new ParseField(TextSimilarityRankBuilder.NAME), TextSimilarityRankRetrieverBuilder::fromXContent) + ); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java new file mode 100644 index 0000000000000..6bc43a4309b0c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilder.java @@ -0,0 +1,215 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.search.rank.rerank.RerankingQueryPhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.rerank.RerankingQueryPhaseRankShardContext; +import org.elasticsearch.search.rank.rerank.RerankingRankFeaturePhaseRankShardContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder.FIELD_FIELD; +import static org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder.INFERENCE_ID_FIELD; +import static org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder.INFERENCE_TEXT_FIELD; +import static org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder.MIN_SCORE_FIELD; + +/** + * A {@code RankBuilder} that enables ranking with text similarity model inference. Supports parameters for configuring the inference call. 
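+ * A minimal usage sketch, mirroring the builder's own tests (the index name, inference endpoint id and query text below are
+ * illustrative only):
+ * <pre>{@code
+ * client.prepareSearch("my-index")
+ *     .setQuery(QueryBuilders.matchQuery("text", "some query"))
+ *     .setRankBuilder(new TextSimilarityRankBuilder("text", "my-rerank-model", "some query", 100, 0.5f));
+ * }</pre>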
+ */ +public class TextSimilarityRankBuilder extends RankBuilder { + + public static final String NAME = "text_similarity_reranker"; + + public static final LicensedFeature.Momentary TEXT_SIMILARITY_RERANKER_FEATURE = LicensedFeature.momentary( + null, + "text-similarity-reranker", + License.OperationMode.ENTERPRISE + ); + + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, args -> { + String inferenceId = (String) args[0]; + String inferenceText = (String) args[1]; + String field = (String) args[2]; + Integer rankWindowSize = args[3] == null ? DEFAULT_RANK_WINDOW_SIZE : (Integer) args[3]; + Float minScore = (Float) args[4]; + + return new TextSimilarityRankBuilder(field, inferenceId, inferenceText, rankWindowSize, minScore); + }); + + static { + PARSER.declareString(constructorArg(), INFERENCE_ID_FIELD); + PARSER.declareString(constructorArg(), INFERENCE_TEXT_FIELD); + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); + PARSER.declareFloat(optionalConstructorArg(), MIN_SCORE_FIELD); + } + + private final String inferenceId; + private final String inferenceText; + private final String field; + private final Float minScore; + + public TextSimilarityRankBuilder(String field, String inferenceId, String inferenceText, int rankWindowSize, Float minScore) { + super(rankWindowSize); + this.inferenceId = inferenceId; + this.inferenceText = inferenceText; + this.field = field; + this.minScore = minScore; + } + + public TextSimilarityRankBuilder(StreamInput in) throws IOException { + super(in); + // rankWindowSize deserialization is handled by the parent class RankBuilder + this.inferenceId = in.readString(); + this.inferenceText = in.readString(); + this.field = in.readString(); + this.minScore = in.readOptionalFloat(); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.TEXT_SIMILARITY_RERANKER_RETRIEVER; + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + // rankWindowSize serialization is handled by the parent class RankBuilder + out.writeString(inferenceId); + out.writeString(inferenceText); + out.writeString(field); + out.writeOptionalFloat(minScore); + } + + @Override + public void doXContent(XContentBuilder builder, Params params) throws IOException { + // rankWindowSize serialization is handled by the parent class RankBuilder + builder.field(INFERENCE_ID_FIELD.getPreferredName(), inferenceId); + builder.field(INFERENCE_TEXT_FIELD.getPreferredName(), inferenceText); + builder.field(FIELD_FIELD.getPreferredName(), field); + if (minScore != null) { + builder.field(MIN_SCORE_FIELD.getPreferredName(), minScore); + } + } + + @Override + public boolean isCompoundBuilder() { + return false; + } + + @Override + public Explanation explainHit(Explanation baseExplanation, RankDoc scoreDoc, List queryNames) { + if (scoreDoc == null) { + return baseExplanation; + } + if (false == baseExplanation.isMatch()) { + return baseExplanation; + } + + assert scoreDoc instanceof RankFeatureDoc : "ScoreDoc is not an instance of RankFeatureDoc"; + RankFeatureDoc rrfRankDoc = (RankFeatureDoc) scoreDoc; + + return Explanation.match( + rrfRankDoc.score, + "rank after reranking: [" + + rrfRankDoc.rank + + "] with score: [" + + rrfRankDoc.score + + "], using inference endpoint: [" + + inferenceId + + "] on document field: [" + + field + + "]", + baseExplanation + ); + 
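+        // the returned explanation carries the post-rerank rank and score, plus the inference endpoint and document field
+        // that produced them, wrapped around the base query explanation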
} + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { + return new RerankingQueryPhaseRankShardContext(queries, rankWindowSize()); + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + return new RerankingQueryPhaseRankCoordinatorContext(rankWindowSize()); + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + return new RerankingRankFeaturePhaseRankShardContext(field); + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from, Client client) { + return new TextSimilarityRankFeaturePhaseRankCoordinatorContext( + size, + from, + rankWindowSize(), + client, + inferenceId, + inferenceText, + minScore + ); + } + + public String field() { + return field; + } + + public String inferenceId() { + return inferenceId; + } + + public String inferenceText() { + return inferenceText; + } + + public Float minScore() { + return minScore; + } + + @Override + protected boolean doEquals(RankBuilder other) { + TextSimilarityRankBuilder that = (TextSimilarityRankBuilder) other; + return Objects.equals(inferenceId, that.inferenceId) + && Objects.equals(inferenceText, that.inferenceText) + && Objects.equals(field, that.field) + && Objects.equals(minScore, that.minScore); + } + + @Override + protected int doHashCode() { + return Objects.hash(inferenceId, inferenceText, field, minScore); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java new file mode 100644 index 0000000000000..a22126439e9e2 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContext.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; + +/** + * A {@code RankFeaturePhaseRankCoordinatorContext} that performs a rerank inference call to determine relevance scores for documents within + * the provided rank window. 
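+ * When a {@code minScore} is configured, documents scoring below it are filtered out before the reranked results are paginated.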
+ */ +public class TextSimilarityRankFeaturePhaseRankCoordinatorContext extends RankFeaturePhaseRankCoordinatorContext { + + protected final Client client; + protected final String inferenceId; + protected final String inferenceText; + protected final Float minScore; + + public TextSimilarityRankFeaturePhaseRankCoordinatorContext( + int size, + int from, + int rankWindowSize, + Client client, + String inferenceId, + String inferenceText, + Float minScore + ) { + super(size, from, rankWindowSize); + this.client = client; + this.inferenceId = inferenceId; + this.inferenceText = inferenceText; + this.minScore = minScore; + } + + @Override + protected void computeScores(RankFeatureDoc[] featureDocs, ActionListener scoreListener) { + // Wrap the provided rankListener to an ActionListener that would handle the response from the inference service + // and then pass the results + final ActionListener actionListener = scoreListener.delegateFailureAndWrap((l, r) -> { + float[] scores = extractScoresFromResponse(r); + if (scores.length != featureDocs.length) { + l.onFailure( + new IllegalStateException("Document and score count mismatch: [" + featureDocs.length + "] vs [" + scores.length + "]") + ); + } else { + l.onResponse(scores); + } + }); + + List featureData = Arrays.stream(featureDocs).map(x -> x.featureData).toList(); + InferenceAction.Request request = generateRequest(featureData); + try { + client.execute(InferenceAction.INSTANCE, request, actionListener); + } finally { + request.decRef(); + } + } + + protected InferenceAction.Request generateRequest(List docFeatures) { + return new InferenceAction.Request( + TaskType.RERANK, + inferenceId, + inferenceText, + docFeatures, + Map.of(), + InputType.SEARCH, + InferenceAction.Request.DEFAULT_TIMEOUT + ); + } + + private float[] extractScoresFromResponse(InferenceAction.Response response) { + InferenceServiceResults results = response.getResults(); + assert results instanceof RankedDocsResults; + + List rankedDocs = ((RankedDocsResults) results).getRankedDocs(); + float[] scores = new float[rankedDocs.size()]; + for (RankedDocsResults.RankedDoc rankedDoc : rankedDocs) { + scores[rankedDoc.index()] = rankedDoc.relevanceScore(); + } + + return scores; + } + + /** + * Sorts documents by score descending and discards those with a score less than minScore. + * @param originalDocs documents to process + */ + @Override + protected RankFeatureDoc[] preprocess(RankFeatureDoc[] originalDocs) { + return Arrays.stream(originalDocs) + .filter(doc -> minScore == null || doc.score >= minScore) + .sorted(Comparator.comparing((RankFeatureDoc doc) -> doc.score).reversed()) + .toArray(RankFeatureDoc[]::new); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java new file mode 100644 index 0000000000000..a81fbb51f678d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * A {@code RetrieverBuilder} for parsing and constructing a text similarity reranker retriever. + */ +public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { + + public static final NodeFeature TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED = new NodeFeature( + "text_similarity_reranker_retriever_supported" + ); + + public static final ParseField RETRIEVER_FIELD = new ParseField("retriever"); + public static final ParseField INFERENCE_ID_FIELD = new ParseField("inference_id"); + public static final ParseField INFERENCE_TEXT_FIELD = new ParseField("inference_text"); + public static final ParseField FIELD_FIELD = new ParseField("field"); + public static final ParseField RANK_WINDOW_SIZE_FIELD = new ParseField("rank_window_size"); + public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>(TextSimilarityRankBuilder.NAME, args -> { + RetrieverBuilder retrieverBuilder = (RetrieverBuilder) args[0]; + String inferenceId = (String) args[1]; + String inferenceText = (String) args[2]; + String field = (String) args[3]; + int rankWindowSize = args[4] == null ? 
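+            // rank_window_size is optional and falls back to RankBuilder.DEFAULT_RANK_WINDOW_SIZE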
DEFAULT_RANK_WINDOW_SIZE : (int) args[4]; + Float minScore = (Float) args[5]; + + return new TextSimilarityRankRetrieverBuilder(retrieverBuilder, inferenceId, inferenceText, field, rankWindowSize, minScore); + }); + + static { + PARSER.declareNamedObject(constructorArg(), (p, c, n) -> p.namedObject(RetrieverBuilder.class, n, c), RETRIEVER_FIELD); + PARSER.declareString(constructorArg(), INFERENCE_ID_FIELD); + PARSER.declareString(constructorArg(), INFERENCE_TEXT_FIELD); + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); + PARSER.declareFloat(optionalConstructorArg(), MIN_SCORE_FIELD); + + RetrieverBuilder.declareBaseParserFields(TextSimilarityRankBuilder.NAME, PARSER); + } + + public static TextSimilarityRankRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) + throws IOException { + if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED) == false) { + throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + TextSimilarityRankBuilder.NAME + "]"); + } + if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { + throw LicenseUtils.newComplianceException(TextSimilarityRankBuilder.NAME); + } + return PARSER.apply(parser, context); + } + + private final RetrieverBuilder retrieverBuilder; + private final String inferenceId; + private final String inferenceText; + private final String field; + private final int rankWindowSize; + private final Float minScore; + + public TextSimilarityRankRetrieverBuilder( + RetrieverBuilder retrieverBuilder, + String inferenceId, + String inferenceText, + String field, + int rankWindowSize, + Float minScore + ) { + this.retrieverBuilder = retrieverBuilder; + this.inferenceId = inferenceId; + this.inferenceText = inferenceText; + this.field = field; + this.rankWindowSize = rankWindowSize; + this.minScore = minScore; + } + + @Override + public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { + retrieverBuilder.extractToSearchSourceBuilder(searchSourceBuilder, compoundUsed); + + // Combining with other rank builder (such as RRF) is not supported yet + if (searchSourceBuilder.rankBuilder() != null) { + throw new IllegalArgumentException("text similarity rank builder cannot be combined with other rank builders"); + } + + searchSourceBuilder.rankBuilder( + new TextSimilarityRankBuilder(this.field, this.inferenceId, this.inferenceText, this.rankWindowSize, this.minScore) + ); + } + + @Override + public String getName() { + return TextSimilarityRankBuilder.NAME; + } + + public int rankWindowSize() { + return rankWindowSize; + } + + @Override + protected void doToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(RETRIEVER_FIELD.getPreferredName()); + builder.startObject(); + builder.field(retrieverBuilder.getName(), retrieverBuilder); + builder.endObject(); + builder.field(INFERENCE_ID_FIELD.getPreferredName(), inferenceId); + builder.field(INFERENCE_TEXT_FIELD.getPreferredName(), inferenceText); + builder.field(FIELD_FIELD.getPreferredName(), field); + builder.field(RANK_WINDOW_SIZE_FIELD.getPreferredName(), rankWindowSize); + if (minScore != null) { + builder.field(MIN_SCORE_FIELD.getPreferredName(), minScore); + } + } + + @Override + protected boolean doEquals(Object other) { + TextSimilarityRankRetrieverBuilder that = (TextSimilarityRankRetrieverBuilder) other; + return 
Objects.equals(retrieverBuilder, that.retrieverBuilder) + && Objects.equals(inferenceId, that.inferenceId) + && Objects.equals(inferenceText, that.inferenceText) + && Objects.equals(field, that.field) + && Objects.equals(rankWindowSize, that.rankWindowSize) + && Objects.equals(minScore, that.minScore); + } + + @Override + protected int doHashCode() { + return Objects.hash(retrieverBuilder, inferenceId, inferenceText, field, rankWindowSize, minScore); + } +} diff --git a/x-pack/plugin/inference/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/inference/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification new file mode 100644 index 0000000000000..f3e40336744d3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -0,0 +1,8 @@ +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0; you may not use this file except in compliance with the Elastic License +# 2.0. +# + +org.elasticsearch.xpack.inference.InferenceFeatures diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilderTests.java new file mode 100644 index 0000000000000..9ea28242f3605 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankBuilderTests.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; + +import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; + +public class TextSimilarityRankBuilderTests extends AbstractXContentSerializingTestCase { + + @Override + protected TextSimilarityRankBuilder createTestInstance() { + return new TextSimilarityRankBuilder( + "my-field", + "my-inference-id", + "my-inference-text", + randomIntBetween(1, 1000), + randomBoolean() ? 
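+            // minScore is optional, so randomly exercise the unset case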
null : randomFloat() + ); + } + + @Override + protected TextSimilarityRankBuilder mutateInstance(TextSimilarityRankBuilder instance) throws IOException { + String field = instance.field(); + String inferenceId = instance.inferenceId(); + String inferenceText = instance.inferenceText(); + int rankWindowSize = instance.rankWindowSize(); + Float minScore = instance.minScore(); + + int mutate = randomIntBetween(0, 4); + switch (mutate) { + case 0 -> field = field + randomAlphaOfLength(2); + case 1 -> inferenceId = inferenceId + randomAlphaOfLength(2); + case 2 -> inferenceText = inferenceText + randomAlphaOfLength(2); + case 3 -> rankWindowSize = randomValueOtherThan(instance.rankWindowSize(), this::randomRankWindowSize); + case 4 -> minScore = randomValueOtherThan(instance.minScore(), this::randomMinScore); + default -> throw new IllegalStateException("Requested to modify more than available parameters."); + } + return new TextSimilarityRankBuilder(field, inferenceId, inferenceText, rankWindowSize, minScore); + } + + @Override + protected Writeable.Reader instanceReader() { + return TextSimilarityRankBuilder::new; + } + + @Override + protected TextSimilarityRankBuilder doParseInstance(XContentParser parser) throws IOException { + parser.nextToken(); + assertEquals(parser.currentToken(), XContentParser.Token.START_OBJECT); + parser.nextToken(); + assertEquals(parser.currentToken(), XContentParser.Token.FIELD_NAME); + assertEquals(parser.currentName(), TextSimilarityRankBuilder.NAME); + TextSimilarityRankBuilder builder = TextSimilarityRankBuilder.PARSER.parse(parser, null); + parser.nextToken(); + assertEquals(parser.currentToken(), XContentParser.Token.END_OBJECT); + parser.nextToken(); + assertNull(parser.currentToken()); + return builder; + } + + private int randomRankWindowSize() { + return randomIntBetween(0, 1000); + } + + private float randomMinScore() { + return randomFloatBetween(-1.0f, 1.0f, true); + } + + public void testParserDefaults() throws IOException { + String json = """ + { + "field": "my-field", + "inference_id": "my-inference-id", + "inference_text": "my-inference-text" + }"""; + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) { + TextSimilarityRankBuilder parsed = TextSimilarityRankBuilder.PARSER.parse(parser, null); + assertEquals(DEFAULT_RANK_WINDOW_SIZE, parsed.rankWindowSize()); + } + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java new file mode 100644 index 0000000000000..50d91a2271de6 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankFeaturePhaseRankCoordinatorContextTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.search.rank.feature.RankFeatureDoc; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +public class TextSimilarityRankFeaturePhaseRankCoordinatorContextTests extends ESTestCase { + + private final Client mockClient = mock(Client.class); + + TextSimilarityRankFeaturePhaseRankCoordinatorContext subject = new TextSimilarityRankFeaturePhaseRankCoordinatorContext( + 10, + 0, + 100, + mockClient, + "my-inference-id", + "some query", + 0.0f + ); + + public void testComputeScores() { + RankFeatureDoc featureDoc1 = new RankFeatureDoc(0, 1.0f, 0); + featureDoc1.featureData("text 1"); + RankFeatureDoc featureDoc2 = new RankFeatureDoc(1, 3.0f, 1); + featureDoc2.featureData("text 2"); + RankFeatureDoc featureDoc3 = new RankFeatureDoc(2, 2.0f, 0); + featureDoc3.featureData("text 3"); + RankFeatureDoc[] featureDocs = new RankFeatureDoc[] { featureDoc1, featureDoc2, featureDoc3 }; + + subject.computeScores(featureDocs, new ActionListener<>() { + @Override + public void onResponse(float[] floats) { + assertArrayEquals(new float[] { 1.0f, 3.0f, 2.0f }, floats, 0.0f); + } + + @Override + public void onFailure(Exception e) { + fail(); + } + }); + + verify(mockClient).execute( + eq(InferenceAction.INSTANCE), + argThat(actionRequest -> ((InferenceAction.Request) actionRequest).getTaskType().equals(TaskType.RERANK)), + any() + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java new file mode 100644 index 0000000000000..a3605aade1fa1 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; +import org.elasticsearch.xpack.inference.InferencePlugin; + +import java.util.Collection; +import java.util.List; + +public class TextSimilarityRankMultiNodeTests extends AbstractRerankerIT { + + private static final String inferenceId = "inference-id"; + private static final String inferenceText = "inference-text"; + private static final float minScore = 0.0f; + + @Override + protected RankBuilder getRankBuilder(int rankWindowSize, String rankFeatureField) { + return new TextSimilarityRankBuilder(rankFeatureField, inferenceId, inferenceText, rankWindowSize, minScore); + } + + @Override + protected RankBuilder getThrowingRankBuilder(int rankWindowSize, String rankFeatureField, ThrowingRankBuilderType type) { + return new TextSimilarityTestPlugin.ThrowingMockRequestActionBasedRankBuilder( + rankWindowSize, + rankFeatureField, + inferenceId, + inferenceText, + minScore, + type.name() + ); + } + + @Override + protected Collection> pluginsNeeded() { + return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java new file mode 100644 index 0000000000000..51f240be6fbeb --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.search.retriever.RetrieverBuilder; +import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.search.retriever.TestRetrieverBuilder; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.usage.SearchUsage; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; + +public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTestCase { + + /** + * Creates a random {@link TextSimilarityRankRetrieverBuilder}. The created instance + * is not guaranteed to pass {@link SearchRequest} validation. This is purely + * for x-content testing. + */ + public static TextSimilarityRankRetrieverBuilder createRandomTextSimilarityRankRetrieverBuilder() { + return new TextSimilarityRankRetrieverBuilder( + TestRetrieverBuilder.createRandomTestRetrieverBuilder(), + randomAlphaOfLength(10), + randomAlphaOfLength(20), + randomAlphaOfLength(50), + randomIntBetween(1, 10000), + randomBoolean() ? 
null : randomFloatBetween(-1.0f, 1.0f, true) + ); + } + + @Override + protected TextSimilarityRankRetrieverBuilder createTestInstance() { + return createRandomTextSimilarityRankRetrieverBuilder(); + } + + @Override + protected TextSimilarityRankRetrieverBuilder doParseInstance(XContentParser parser) { + return TextSimilarityRankRetrieverBuilder.PARSER.apply( + parser, + new RetrieverParserContext( + new SearchUsage(), + nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED + || nf == TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED + ) + ); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + List entries = new ArrayList<>(); + entries.add( + new NamedXContentRegistry.Entry( + RetrieverBuilder.class, + TestRetrieverBuilder.TEST_SPEC.getName(), + (p, c) -> TestRetrieverBuilder.TEST_SPEC.getParser().fromXContent(p, (RetrieverParserContext) c), + TestRetrieverBuilder.TEST_SPEC.getName().getForRestApiVersion() + ) + ); + entries.add( + new NamedXContentRegistry.Entry( + RetrieverBuilder.class, + new ParseField(TextSimilarityRankBuilder.NAME), + (p, c) -> TextSimilarityRankRetrieverBuilder.PARSER.apply(p, (RetrieverParserContext) c) + ) + ); + return new NamedXContentRegistry(entries); + } + + public void testParserDefaults() throws IOException { + String json = """ + { + "retriever": { + "test": { + "value": "my-test-retriever" + } + }, + "field": "my-field", + "inference_id": "my-inference-id", + "inference_text": "my-inference-text" + }"""; + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) { + TextSimilarityRankRetrieverBuilder parsed = TextSimilarityRankRetrieverBuilder.PARSER.parse(parser, null); + assertEquals(DEFAULT_RANK_WINDOW_SIZE, parsed.rankWindowSize()); + } + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java new file mode 100644 index 0000000000000..7fbfe70dbcfe7 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.InferencePlugin; +import org.junit.Before; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.hamcrest.Matchers.containsString; + +public class TextSimilarityRankTests extends ESSingleNodeTestCase { + + /** + * {@code TextSimilarityRankBuilder} that simulates an inference call that returns a different number of results as the input. + */ + public static class InvalidInferenceResultCountProvidingTextSimilarityRankBuilder extends TextSimilarityRankBuilder { + + public InvalidInferenceResultCountProvidingTextSimilarityRankBuilder( + String field, + String inferenceId, + String inferenceText, + int rankWindowSize, + Float minScore + ) { + super(field, inferenceId, inferenceText, rankWindowSize, minScore); + } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from, Client client) { + return new TextSimilarityRankFeaturePhaseRankCoordinatorContext( + size, + from, + rankWindowSize(), + client, + inferenceId, + inferenceText, + minScore + ) { + @Override + protected InferenceAction.Request generateRequest(List docFeatures) { + return new InferenceAction.Request( + TaskType.RERANK, + inferenceId, + inferenceText, + docFeatures, + Map.of("invalidInferenceResultCount", true), + InputType.SEARCH, + InferenceAction.Request.DEFAULT_TIMEOUT + ); + } + }; + } + } + + private static final String inferenceId = "inference-id"; + private static final String inferenceText = "inference-text"; + private static final float minScore = 0.0f; + + private Client client; + + @Override + protected Collection> getPlugins() { + return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + } + + @Before + public void setup() { + // Initialize index with a few documents + client = client(); + for (int i = 0; i < 5; i++) { + client.prepareIndex("my-index").setId(String.valueOf(i)).setSource(Collections.singletonMap("text", String.valueOf(i))).get(); + } + client.admin().indices().prepareRefresh("my-index").get(); + } + + public void testRerank() { + ElasticsearchAssertions.assertNoFailuresAndResponse( + // Execute search with text similarity reranking + client.prepareSearch() + .setRankBuilder(new TextSimilarityRankBuilder("text", "my-rerank-model", "my query", 100, 0.0f)) + .setQuery(QueryBuilders.matchAllQuery()), + response -> { + // Verify order, rank and score of results + SearchHit[] hits = response.getHits().getHits(); + assertEquals(5, hits.length); + assertHitHasRankScoreAndText(hits[0], 1, 4.0f, "4"); + assertHitHasRankScoreAndText(hits[1], 2, 3.0f, "3"); + assertHitHasRankScoreAndText(hits[2], 3, 2.0f, "2"); + assertHitHasRankScoreAndText(hits[3], 4, 1.0f, "1"); + 
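+                // the mock reranking service is assumed to score each document by the numeric value of its text field,
+                // hence the scores descend from 4.0 to 0.0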
assertHitHasRankScoreAndText(hits[4], 5, 0.0f, "0"); + } + ); + } + + public void testRerankWithMinScore() { + ElasticsearchAssertions.assertNoFailuresAndResponse( + // Execute search with text similarity reranking + client.prepareSearch() + .setRankBuilder(new TextSimilarityRankBuilder("text", "my-rerank-model", "my query", 100, 1.5f)) + .setQuery(QueryBuilders.matchAllQuery()), + response -> { + // Verify order, rank and score of results + SearchHit[] hits = response.getHits().getHits(); + assertEquals(3, hits.length); + assertHitHasRankScoreAndText(hits[0], 1, 4.0f, "4"); + assertHitHasRankScoreAndText(hits[1], 2, 3.0f, "3"); + assertHitHasRankScoreAndText(hits[2], 3, 2.0f, "2"); + } + ); + } + + public void testRerankInferenceFailure() { + ElasticsearchAssertions.assertFailures( + // Execute search with text similarity reranking + client.prepareSearch() + .setRankBuilder( + new TextSimilarityTestPlugin.ThrowingMockRequestActionBasedRankBuilder( + 100, + "text", + "my-rerank-model", + "my query", + 0.7f, + AbstractRerankerIT.ThrowingRankBuilderType.THROWING_RANK_FEATURE_PHASE_COORDINATOR_CONTEXT.name() + ) + ) + .setQuery(QueryBuilders.matchAllQuery()), + RestStatus.INTERNAL_SERVER_ERROR, + containsString("Failed to execute phase [rank-feature], Computing updated ranks for results failed") + ); + } + + public void testRerankInferenceResultMismatch() { + ElasticsearchAssertions.assertFailures( + // Execute search with text similarity reranking + client.prepareSearch() + .setRankBuilder( + new InvalidInferenceResultCountProvidingTextSimilarityRankBuilder("text", "my-rerank-model", "my query", 100, 1.5f) + ) + .setQuery(QueryBuilders.matchAllQuery()), + RestStatus.INTERNAL_SERVER_ERROR, + containsString("Failed to execute phase [rank-feature], Computing updated ranks for results failed") + ); + } + + private static void assertHitHasRankScoreAndText(SearchHit hit, int expectedRank, float expectedScore, String expectedText) { + assertEquals(expectedRank, hit.getRank()); + assertEquals(expectedScore, hit.getScore(), 0.0f); + assertEquals(expectedText, Objects.requireNonNull(hit.getSourceAsMap()).get("text")); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java new file mode 100644 index 0000000000000..1e457a1a27c92 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityTestPlugin.java @@ -0,0 +1,287 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.search.SearchPhaseController; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankShardResult; +import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankCoordinatorContext; +import org.elasticsearch.search.rank.context.RankFeaturePhaseRankShardContext; +import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static java.util.Collections.singletonList; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Plugin for text similarity tests. Defines a filter for modifying inference call behavior, as well as a {@code TextSimilarityRankBuilder} + * implementation that can be configured to throw an exception at various stages of processing. 
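 + * The injected mock inference response scores each input document by parsing its text as a float, which keeps the expected ranks deterministic.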
+ */ +public class TextSimilarityTestPlugin extends Plugin implements ActionPlugin { + + private static final String inferenceId = "inference-id"; + private static final String inferenceText = "inference-text"; + private static final float minScore = 0.0f; + + private final SetOnce testFilter = new SetOnce<>(); + + @Override + public Collection createComponents(PluginServices services) { + testFilter.set(new TestFilter()); + return Collections.emptyList(); + } + + @Override + public List getActionFilters() { + return singletonList(testFilter.get()); + } + + private static final String THROWING_REQUEST_ACTION_BASED_RANK_BUILDER_NAME = "throwing_request_action_based_rank"; + + @Override + public List getNamedWriteables() { + return List.of( + new NamedWriteableRegistry.Entry( + RankBuilder.class, + THROWING_REQUEST_ACTION_BASED_RANK_BUILDER_NAME, + ThrowingMockRequestActionBasedRankBuilder::new + ) + ); + } + + /** + * Action filter that captures the inference action and injects a mock response. + */ + static class TestFilter implements ActionFilter { + + @Override + public int order() { + return Integer.MIN_VALUE; + } + + @Override + @SuppressWarnings("unchecked") + public void apply( + Task task, + String action, + Request request, + ActionListener listener, + ActionFilterChain chain + ) { + // For any other action than inference, execute normally + if (action.equals(InferenceAction.INSTANCE.name()) == false) { + chain.proceed(task, action, request, listener); + return; + } + + assert request instanceof InferenceAction.Request; + boolean shouldThrow = (boolean) ((InferenceAction.Request) request).getTaskSettings().getOrDefault("throwing", false); + boolean hasInvalidInferenceResultCount = (boolean) ((InferenceAction.Request) request).getTaskSettings() + .getOrDefault("invalidInferenceResultCount", false); + + if (shouldThrow) { + listener.onFailure(new UnsupportedOperationException("simulated failure")); + } else { + List rankedDocsResults = new ArrayList<>(); + List inputs = ((InferenceAction.Request) request).getInput(); + int resultCount = hasInvalidInferenceResultCount ? inputs.size() - 1 : inputs.size(); + for (int i = 0; i < resultCount; i++) { + rankedDocsResults.add(new RankedDocsResults.RankedDoc(i, Float.parseFloat(inputs.get(i)), inputs.get(i))); + } + ActionResponse response = new InferenceAction.Response(new RankedDocsResults(rankedDocsResults)); + listener.onResponse((Response) response); + } + } + } + + public static class ThrowingMockRequestActionBasedRankBuilder extends TextSimilarityRankBuilder { + + public static final ParseField FIELD_FIELD = new ParseField("field"); + public static final ParseField INFERENCE_ID = new ParseField("inference_id"); + public static final ParseField INFERENCE_TEXT = new ParseField("inference_text"); + public static final ParseField THROWING_TYPE_FIELD = new ParseField("throwing-type"); + + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "throwing_request_action_based_rank", + args -> { + int rankWindowSize = args[0] == null ? 
DEFAULT_RANK_WINDOW_SIZE : (int) args[0]; + String field = (String) args[1]; + if (field == null || field.isEmpty()) { + throw new IllegalArgumentException("Field cannot be null or empty"); + } + final String inferenceId = (String) args[2]; + final String inferenceText = (String) args[3]; + final float minScore = (float) args[4]; + String throwingType = (String) args[5]; + return new ThrowingMockRequestActionBasedRankBuilder( + rankWindowSize, + field, + inferenceId, + inferenceText, + minScore, + throwingType + ); + } + ); + + static { + PARSER.declareInt(optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD); + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareString(constructorArg(), INFERENCE_ID); + PARSER.declareString(constructorArg(), INFERENCE_TEXT); + PARSER.declareString(constructorArg(), THROWING_TYPE_FIELD); + } + + protected final AbstractRerankerIT.ThrowingRankBuilderType throwingRankBuilderType; + + public ThrowingMockRequestActionBasedRankBuilder( + final int rankWindowSize, + final String field, + final String inferenceId, + final String inferenceText, + final float minScore, + final String throwingType + ) { + super(field, inferenceId, inferenceText, rankWindowSize, minScore); + this.throwingRankBuilderType = AbstractRerankerIT.ThrowingRankBuilderType.valueOf(throwingType); + } + + public ThrowingMockRequestActionBasedRankBuilder(StreamInput in) throws IOException { + super(in); + this.throwingRankBuilderType = in.readEnum(AbstractRerankerIT.ThrowingRankBuilderType.class); + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + super.doWriteTo(out); + out.writeEnum(throwingRankBuilderType); + } + + @Override + public void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + super.doXContent(builder, params); + builder.field(THROWING_TYPE_FIELD.getPreferredName(), throwingRankBuilderType); + } + + @Override + public QueryPhaseRankShardContext buildQueryPhaseShardContext(List queries, int from) { + if (this.throwingRankBuilderType == AbstractRerankerIT.ThrowingRankBuilderType.THROWING_QUERY_PHASE_SHARD_CONTEXT) + return new QueryPhaseRankShardContext(queries, rankWindowSize()) { + @Override + public RankShardResult combineQueryPhaseResults(List rankResults) { + throw new UnsupportedOperationException("qps - simulated failure"); + } + }; + else { + return super.buildQueryPhaseShardContext(queries, from); + } + } + + @Override + public QueryPhaseRankCoordinatorContext buildQueryPhaseCoordinatorContext(int size, int from) { + if (this.throwingRankBuilderType == AbstractRerankerIT.ThrowingRankBuilderType.THROWING_QUERY_PHASE_COORDINATOR_CONTEXT) + return new QueryPhaseRankCoordinatorContext(rankWindowSize()) { + @Override + public ScoreDoc[] rankQueryPhaseResults( + List querySearchResults, + SearchPhaseController.TopDocsStats topDocStats + ) { + throw new UnsupportedOperationException("qpc - simulated failure"); + } + }; + else { + return super.buildQueryPhaseCoordinatorContext(size, from); + } + } + + @Override + public RankFeaturePhaseRankShardContext buildRankFeaturePhaseShardContext() { + if (this.throwingRankBuilderType == AbstractRerankerIT.ThrowingRankBuilderType.THROWING_RANK_FEATURE_PHASE_SHARD_CONTEXT) + return new RankFeaturePhaseRankShardContext(field()) { + @Override + public RankShardResult buildRankFeatureShardResult(SearchHits hits, int shardId) { + throw new UnsupportedOperationException("rfs - simulated failure"); + } + }; + else { + return super.buildRankFeaturePhaseShardContext(); + } 
+ } + + @Override + public RankFeaturePhaseRankCoordinatorContext buildRankFeaturePhaseCoordinatorContext(int size, int from, Client client) { + if (this.throwingRankBuilderType == AbstractRerankerIT.ThrowingRankBuilderType.THROWING_RANK_FEATURE_PHASE_COORDINATOR_CONTEXT) + return new TextSimilarityRankFeaturePhaseRankCoordinatorContext( + size, + from, + rankWindowSize(), + client, + inferenceId, + inferenceText, + minScore + ) { + @Override + protected InferenceAction.Request generateRequest(List docFeatures) { + return new InferenceAction.Request( + TaskType.RERANK, + inferenceId, + inferenceText, + docFeatures, + Map.of("throwing", true), + InputType.SEARCH, + InferenceAction.Request.DEFAULT_TIMEOUT + ); + } + }; + else { + return super.buildRankFeaturePhaseCoordinatorContext(size, from, client); + } + } + + @Override + public String getWriteableName() { + return "throwing_request_action_based_rank"; + } + } + +} diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index 2f6127c44957f..c84fdd871f857 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -22,6 +22,7 @@ public class InferenceRestIT extends ESClientYamlSuiteTestCase { public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .setting("xpack.security.enabled", "false") .setting("xpack.security.http.ssl.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") .plugin("inference-service-test") .feature(FeatureFlag.SEMANTIC_TEXT_ENABLED) .distribution(DistributionType.DEFAULT) diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml new file mode 100644 index 0000000000000..2e01e2b9c8d04 --- /dev/null +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml @@ -0,0 +1,90 @@ +setup: + - requires: + cluster_features: "text_similarity_reranker_retriever_supported" + reason: semantic reranking introduced in 8.15.0 + test_runner_features: "close_to" + + - do: + inference.put: + task_type: rerank + inference_id: my-rerank-model + body: > + { + "service": "test_reranking_service", + "service_settings": { + "model_id": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + text: + type: text + topic: + type: keyword + + - do: + index: + index: test-index + id: doc_1 + body: + text: "As seen from Earth, a solar eclipse happens when the Moon is directly between the Earth and the Sun." + topic: ["science"] + refresh: true + + - do: + index: + index: test-index + id: doc_2 + body: + text: "The phases of the Moon come from the position of the Moon relative to the Earth and Sun." + topic: ["science"] + refresh: true + + - do: + index: + index: test-index + id: doc_3 + body: + text: "Sun Moon Lake is a lake in Nantou County, Taiwan. It is the largest lake in Taiwan." 
+ topic: ["geography"] + refresh: true +--- +"Simple text similarity rank retriever": + + - do: + search: + index: test-index + body: + track_total_hits: true + fields: [ "text", "topic" ] + retriever: + text_similarity_reranker: + retriever: + standard: + query: + term: + topic: "science" + rank_window_size: 10 + inference_id: my-rerank-model + inference_text: "How often does the moon hide the sun?" + field: text + size: 10 + + - match: { hits.total.value : 2 } + - length: { hits.hits: 2 } + + - match: { hits.hits.0._id: "doc_2" } + - match: { hits.hits.0._rank: 1 } + - close_to: { hits.hits.0._score: { value: 0.4, error: 0.001 } } + + - match: { hits.hits.1._id: "doc_1" } + - match: { hits.hits.1._rank: 2 } + - close_to: { hits.hits.1._score: { value: 0.2, error: 0.001 } } From 9b6cca1f69e639cec1ffac1303298b72965c6afc Mon Sep 17 00:00:00 2001 From: Mikhail Berezovskiy Date: Fri, 28 Jun 2024 13:53:08 -0700 Subject: [PATCH 061/216] Add TLS close_notify handler to http server (#109899) > "close_notify" alert is used to indicate orderly closure of one > direction of the connection. Upon receiving such an alert, the TLS > implementation SHOULD indicate end-of-data to the application. This PR addresses issue when HTTP client sends TLS close_notify alert and wait for response from our HTTP server. But we dont handle close_notify alerts today, only full connection termination. Some clients might hang for a while until other timeouts fire up. In this change I introduce an event listener for SslCloseCompletionEvent in the Netty4HttpPipeliningHandler, that handles close_notify alerts. When we receive alert we will close connection immediately. It might introduce server response truncation, but we rely on client to send close_notify when no more data is expected from the server. 
Added warning logging, can be spotted now in unit and integ tests ``` [2024-06-25T21:08:49,310][WARN ][o.e.h.n.Netty4HttpPipeliningHandler] [node_t1] received TLS close_notify, closing connection [id: 0xfb96648b, L:/[0:0:0:0:0:0:0:1]:13484 - R:/[0:0:0:0:0:0:0:1]:63937] ``` Fixes #76642 --- .../netty4/Netty4HttpPipeliningHandler.java | 11 + x-pack/plugin/security/build.gradle | 5 + .../SecurityNetty4TransportCloseNotifyIT.java | 175 +++++++++++ ...y4HttpServerTransportCloseNotifyTests.java | 286 ++++++++++++++++++ 4 files changed, 477 insertions(+) create mode 100644 x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java index c9beeef246703..b915011514d9a 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpPipeliningHandler.java @@ -21,6 +21,7 @@ import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpObject; import io.netty.handler.codec.http.HttpResponse; +import io.netty.handler.ssl.SslCloseCompletionEvent; import io.netty.util.ReferenceCountUtil; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.PromiseCombiner; @@ -477,6 +478,16 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { } } + @Override + public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { + if (evt instanceof SslCloseCompletionEvent closeEvent) { + if (closeEvent.isSuccess() && ctx.channel().isActive()) { + logger.trace("received TLS close_notify, closing connection {}", ctx.channel()); + ctx.channel().close(); + } + } + } + private record WriteOperation(HttpObject msg, ChannelPromise promise) { void failAsClosedChannel() { diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 07308d5d29a9a..0bba1e845b338 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -151,6 +151,11 @@ dependencies { testImplementation('org.apache.directory.server:ldap-client-test:2.0.0-M24') testImplementation('org.apache.directory.server:apacheds-interceptor-kerberos:2.0.0-M24') testImplementation('org.apache.directory.mavibot:mavibot:1.0.0-M8') + + // netty self signed certificate dependency + testImplementation('org.bouncycastle:bcprov-jdk18on:1.78.1') + testImplementation ('org.bouncycastle:bcutil-jdk18on:1.78.1') + testImplementation('org.bouncycastle:bcpkix-jdk18on:1.78.1') } tasks.named("test").configure { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java new file mode 100644 index 0000000000000..f09007bebd80c --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java @@ -0,0 +1,175 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.transport.netty4; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioSocketChannel; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpObjectAggregator; +import io.netty.handler.codec.http.HttpRequestEncoder; +import io.netty.handler.codec.http.HttpResponseDecoder; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.SslHandler; +import io.netty.handler.ssl.util.InsecureTrustManagerFactory; +import io.netty.handler.ssl.util.SelfSignedCertificate; + +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.support.CancellableActionTestPlugin; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.SecurityIntegTestCase; + +import java.util.Collection; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static org.elasticsearch.test.TaskAssertions.assertAllTasksHaveFinished; +import static org.elasticsearch.test.rest.ESRestTestCase.basicAuthHeaderValue; + +@ClusterScope(numDataNodes = 0, scope = Scope.TEST) +@ESTestCase.WithoutSecurityManager +@SuppressForbidden(reason = "requires java.io.File for netty self-signed certificate") +public class SecurityNetty4TransportCloseNotifyIT extends SecurityIntegTestCase { + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + try { + var ssc = new SelfSignedCertificate(); + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.key", ssc.privateKey().getPath()) + .put("xpack.security.http.ssl.certificate", ssc.certificate().getPath()) + .build(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), CancellableActionTestPlugin.class); + } + + Bootstrap setupNettyClient(String node, Consumer responseHandler) throws Exception { + var sslCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); + var httpServer = internalCluster().getInstance(HttpServerTransport.class, node); + var remoteAddr = 
randomFrom(httpServer.boundAddress().boundAddresses()); + return new Bootstrap().group(new NioEventLoopGroup(1)) + .channel(NioSocketChannel.class) + .remoteAddress(remoteAddr.getAddress(), remoteAddr.getPort()) + .handler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel ch) { + var p = ch.pipeline(); + p.addLast(sslCtx.newHandler(ch.alloc())); + p.addLast(new HttpRequestEncoder()); + p.addLast(new HttpResponseDecoder()); + p.addLast(new HttpObjectAggregator(4096)); + p.addLast(new SimpleChannelInboundHandler() { + @Override + protected void channelRead0(ChannelHandlerContext ctx, FullHttpResponse msg) { + responseHandler.accept(msg); + } + }); + } + }); + } + + /** + * Ensures that the server closes the connection when it receives close_notify. + * Simulates a normal connection flow where the client and server exchange a few requests and responses. + * After the exchange the client sends close_notify and expects the server to close the connection. + */ + public void testSendCloseNotifyAfterHttpGetRequests() throws Exception { + var node = internalCluster().startNode(); + var serverRespQueue = new ArrayBlockingQueue(10); + var client = setupNettyClient(node, serverRespQueue::add); + try { + var channel = client.connect().sync().channel(); + + // send some HTTP GET requests before closing a channel + var nReq = randomIntBetween(1, 10); // nothing particular about number 10 + for (int i = 0; i < nReq; i++) { + var req = newHttpGetReq("/"); + channel.writeAndFlush(req).get(5, TimeUnit.SECONDS); + } + assertBusy(() -> assertEquals(nReq, serverRespQueue.size())); + assertTrue(serverRespQueue.stream().allMatch(resp -> resp.status().code() == 200)); + + // send close_notify alert and wait for channel closure + var sslHandler = channel.pipeline().get(SslHandler.class); + sslHandler.closeOutbound(); + try { + assertTrue("server must close connection", channel.closeFuture().await(5000)); + } finally { + channel.close().sync(); + } + } finally { + client.config().group().shutdownGracefully().sync(); + } + } + + /** + * Ensures that receiving close_notify closes the connection and cancels the running action. 
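 + * Cancellation is expected as a consequence of the server closing the HTTP channel while the action is still in flight.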
+ */ + public void testSendCloseNotifyCancelAction() throws Exception { + var node = internalCluster().startNode(); + var indexName = "close-notify-cancel"; + createIndex(indexName); + ensureGreen(indexName); + var gotResponse = new AtomicBoolean(false); + var client = setupNettyClient(node, resp -> gotResponse.set(true)); + var actionName = ClusterStateAction.NAME; + try (var capturingAction = CancellableActionTestPlugin.capturingActionOnNode(actionName, node)) { + var channel = client.connect().sync().channel(); + var req = newHttpGetReq("/_cluster/state"); + channel.writeAndFlush(req); + var ssl = channel.pipeline().get(SslHandler.class); + capturingAction.captureAndCancel(ssl::closeOutbound); + try { + assertTrue("server must close connection", channel.closeFuture().await(5000)); + assertAllTasksHaveFinished(actionName); + assertFalse("must cancel action before http response", gotResponse.get()); + } finally { + channel.close().sync(); + } + } finally { + client.config().group().shutdownGracefully().sync(); + } + } + + private DefaultFullHttpRequest newHttpGetReq(String uri) { + var req = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, uri); + req.headers().add(HttpHeaderNames.AUTHORIZATION, basicAuthHeaderValue(nodeClientUsername(), nodeClientPassword())); + return req; + } + +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java new file mode 100644 index 0000000000000..e61f1e4e21661 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java @@ -0,0 +1,286 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.transport.netty4; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioSocketChannel; +import io.netty.handler.codec.http.DefaultFullHttpRequest; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpObjectAggregator; +import io.netty.handler.codec.http.HttpRequestEncoder; +import io.netty.handler.codec.http.HttpResponseDecoder; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.SslHandler; +import io.netty.handler.ssl.util.InsecureTrustManagerFactory; +import io.netty.handler.ssl.util.SelfSignedCertificate; +import io.netty.util.concurrent.Future; + +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.http.AbstractHttpServerTransportTestCase; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.http.netty4.Netty4HttpServerTransport; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.netty4.SharedGroupFactory; +import org.elasticsearch.transport.netty4.TLSConfig; +import org.elasticsearch.xpack.core.ssl.SSLService; + +import java.security.cert.CertificateException; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.BlockingDeque; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; + +import javax.net.ssl.SSLException; + +@ESTestCase.WithoutSecurityManager +@SuppressForbidden(reason = "requires java.io.File for netty self-signed certificate") +public class SecurityNetty4HttpServerTransportCloseNotifyTests extends AbstractHttpServerTransportTestCase { + + private static T safePoll(BlockingQueue queue) { + try { + var t = queue.poll(5, TimeUnit.SECONDS); + if (t == null) { + throw new AssertionError("queue is empty"); + } else { + return t; + } + } catch (Exception e) { + throw new AssertionError(e); + } + } + + private static void safeAwait(Future nettyFuture) { + try { + nettyFuture.get(5, TimeUnit.SECONDS); + } catch (InterruptedException | ExecutionException | TimeoutException e) { + throw new AssertionError(e); + } + } + + /** + * Setup {@link Netty4HttpServerTransport} with SSL enabled and self-signed certificate. + * All HTTP requests accumulate in the dispatcher reqQueue. + * The server will not reply to request automatically, to send response poll the queue. 
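 + * A response can then be sent through the captured RestChannel of the queued request.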
+ */ + private HttpServer setupHttpServer(String tlsProtocols) throws CertificateException { + var ssc = new SelfSignedCertificate(); + var threadPool = new TestThreadPool("tls-close-notify"); + var dispatcher = new QueuedDispatcher(); + var secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode"); + var settings = Settings.builder() + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.key", ssc.privateKey().getPath()) + .put("xpack.security.http.ssl.certificate", ssc.certificate().getPath()) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .put("xpack.security.http.ssl.supported_protocols", tlsProtocols) + .build(); + var env = TestEnvironment.newEnvironment(settings); + var sslService = new SSLService(env); + var server = new Netty4HttpServerTransport( + settings, + new NetworkService(Collections.emptyList()), + threadPool, + xContentRegistry(), + dispatcher, + randomClusterSettings(), + new SharedGroupFactory(settings), + Tracer.NOOP, + new TLSConfig(sslService.getHttpTransportSSLConfiguration(), sslService::createSSLEngine), + null, + randomFrom((httpPreRequest, channel, listener) -> listener.onResponse(null), null) + ); + server.start(); + return new HttpServer(server, dispatcher, threadPool); + } + + /** + * Set up a Netty HTTPs client and connect to server. + * Configured with self-signed certificate trust. + * Server responses accumulate in the respQueue, and exceptions in the errQueue. + */ + private HttpClient setupHttpClient(HttpServer server) throws SSLException, InterruptedException { + var clientSslCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); + var remoteAddr = randomFrom(server.netty.boundAddress().boundAddresses()); + var respQueue = new LinkedBlockingDeque(); + var errQueue = new LinkedBlockingDeque(); + var bootstrap = new Bootstrap().group(new NioEventLoopGroup(1)) + .channel(NioSocketChannel.class) + .remoteAddress(remoteAddr.getAddress(), remoteAddr.getPort()) + .handler(new ChannelInitializer() { + @Override + protected void initChannel(SocketChannel ch) { + var p = ch.pipeline(); + p.addLast(clientSslCtx.newHandler(ch.alloc())); + p.addLast(new HttpRequestEncoder()); + p.addLast(new HttpResponseDecoder()); + p.addLast(new HttpObjectAggregator(server.netty.handlingSettings.maxContentLength() * 2)); + p.addLast(new SimpleChannelInboundHandler() { + @Override + protected void channelRead0(ChannelHandlerContext ctx, FullHttpResponse msg) { + respQueue.add(msg); + } + + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { + errQueue.add(cause); + } + }); + } + }); + var channel = bootstrap.connect().sync().channel(); + return new HttpClient(bootstrap, channel, respQueue, errQueue); + } + + /** + * Setup server and client, establish ssl connection, blocks until handshake is done + */ + private ConnectionCtx connectClientAndServer(String tlsVersion) { + try { + var server = setupHttpServer(tlsVersion); + var client = setupHttpClient(server); + var ssl = client.channel.pipeline().get(SslHandler.class); + safeAwait(ssl.handshakeFuture()); + assertEquals(tlsVersion, ssl.engine().getSession().getProtocol()); + return new ConnectionCtx(tlsVersion, server, client); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private void runForAllTlsVersions(Consumer test) { + List.of("TLSv1.2", "TLSv1.3").forEach(test); + } + + /** + * This test ensures that 
sending close_notify from the client on an idle channel triggers closure of the connection by the server. + */ + public void testCloseIdleConnection() { + runForAllTlsVersions(tlsVersion -> { + try (var ctx = connectClientAndServer(tlsVersion)) { + var ssl = ctx.client.channel.pipeline().get(SslHandler.class); + ssl.closeOutbound(); + safeAwait(ctx.client.channel.closeFuture()); + } + }); + } + + /** + * This test ensures that sending close_notify after an HTTP response closes the channel immediately. + * It is similar to the idle-channel test, except that here we exchange an HTTP request and response first. + */ + public void testSendCloseNotifyAfterHttpResponse() { + runForAllTlsVersions(tlsVersion -> { + try (var ctx = connectClientAndServer(tlsVersion)) { + ctx.client.channel.writeAndFlush(new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/index")); + var serverRequestCtx = safePoll(ctx.server.dispatcher.reqQueue); + serverRequestCtx.restChannel.sendResponse(new RestResponse(RestStatus.OK, "")); + safePoll(ctx.client.respQueue); + var ssl = ctx.client.channel.pipeline().get(SslHandler.class); + ssl.closeOutbound(); + safeAwait(ctx.client.channel.closeFuture()); + } + }); + } + + /** + * This test ensures that sending close_notify with outstanding requests closes the channel immediately. + */ + public void testSendCloseNotifyBeforeHttpResponse() { + runForAllTlsVersions(tlsVersion -> { + try (var ctx = connectClientAndServer(tlsVersion)) { + var server = ctx.server; + var client = ctx.client; + + var nRequests = randomIntBetween(1, 5); + for (int i = 0; i < nRequests; i++) { + client.channel.writeAndFlush(new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/index")); + } + assertBusy(() -> assertEquals(nRequests, server.dispatcher.reqQueue.size())); + + // after the server has received the requests, send close_notify before the server responds + var ssl = client.channel.pipeline().get(SslHandler.class); + ssl.closeOutbound(); + + safeAwait(ctx.client.channel.closeFuture()); + assertTrue(client.errQueue.isEmpty()); + assertTrue(client.respQueue.isEmpty()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + } + + private record HttpServer(Netty4HttpServerTransport netty, QueuedDispatcher dispatcher, ThreadPool threadPool) {} + + private record HttpClient( + Bootstrap netty, + Channel channel, + BlockingDeque respQueue, + BlockingDeque errQueue + ) {} + + private record ConnectionCtx(String tlsProtocol, HttpServer server, HttpClient client) implements AutoCloseable { + + @Override + public void close() { + // release unconsumed requests, otherwise leak detection will complain about buffer leaks after GC + server.dispatcher.reqQueue.forEach(r -> r.request.getHttpRequest().release()); + server.netty.stop(); + server.threadPool.shutdownNow(); + safeAwait(client.netty.config().group().shutdownGracefully()); + } + } + + private static class QueuedDispatcher implements HttpServerTransport.Dispatcher { + BlockingQueue reqQueue = new LinkedBlockingDeque<>(); + BlockingDeque errQueue = new LinkedBlockingDeque<>(); + + @Override + public void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext threadContext) { + reqQueue.add(new ReqCtx(request, channel, threadContext)); + } + + @Override + public void dispatchBadRequest(RestChannel channel, ThreadContext threadContext, Throwable cause) { + errQueue.add(new ErrCtx(channel, threadContext, cause)); + } + + record ReqCtx(RestRequest request, RestChannel restChannel, ThreadContext threadContext) {} + + record ErrCtx(RestChannel restChannel, 
ThreadContext threadContext, Throwable cause) {} } } From 5409aa7dcf1de1db3938dc30c942935c4b959149 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 28 Jun 2024 18:07:16 -0700 Subject: [PATCH 062/216] Support mixed aggregates in METRICS (#110206) This pull request supports mixed aggregates in the METRICS command. Non-rate aggregates will be rewritten as a pair of `to_partial` and `from_partial` aggregates: - The `to_partial` aggregates will be executed in the first pass and always produce an intermediate output regardless of the aggregate mode. - The `from_partial` aggregates will be executed in the second pass and always receive the intermediate output produced by `to_partial`. Example: **METRICS k8s max(rate(request)), max(memory_used)** becomes: ``` METRICS k8s | STATS rate(request), $p1=to_partial(max(memory_used)) BY _tsid | STATS max(`rate(request)`), `max(memory_used)` = from_partial($p1, max($_)) ``` **METRICS k8s max(rate(request)), avg(memory_used) BY host** becomes: ``` METRICS k8s | STATS rate(request), $p1=to_partial(sum(memory_used)), $p2=to_partial(count(memory_used)), values(host) BY _tsid | STATS max(`rate(request)`), $sum=from_partial($p1, sum($_)), $count=from_partial($p2, count($_)) BY host=`values(host)` | EVAL `avg(memory_used)` = $sum / $count | KEEP `max(rate(request))`, `avg(memory_used)`, host ``` **METRICS k8s min(memory_used), sum(rate(request)) BY pod, bucket(@timestamp, 5m)** becomes: ``` METRICS k8s | EVAL `bucket(@timestamp, 5m)` = datetrunc(@timestamp, '5m') | STATS rate(request), $p1=to_partial(min(memory_used)), VALUES(pod) BY _tsid, `bucket(@timestamp, 5m)` | STATS sum(`rate(request)`), `min(memory_used)` = from_partial($p1, min($_)) BY pod=`VALUES(pod)`, `bucket(@timestamp, 5m)` | KEEP `min(memory_used)`, `sum(rate(request))`, pod, `bucket(@timestamp, 5m)` ``` ---- I also considered a different approach: extending the runtime to support scatter/gather via exchange. We could have two pipelines: one aggregating grouped by _tsid (and time bucket), and another grouped by the user-specified keys. Each pipeline would expand to fill in the necessary blocks so that both produce the same output. However, that requires replicating most of the aggregate rules for dual aggregates. Hence, I opted for the approach in this PR, which doesn't change anything for non-metrics queries, making it safer, even though the dual-aggregate approach should have better performance and use less memory. 
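At the runtime level the hand-off between the two passes is just a wrap/unwrap of the delegate's intermediate state; in simplified form (excerpted from the new aggregator classes in this diff, not a new API):
```java
// Emit side (to_partial): pack the delegate's intermediate blocks into a single composite block.
Block[] partialBlocks = new Block[delegate.intermediateBlockCount()];
delegate.evaluateIntermediate(partialBlocks, 0, selected);
blocks[offset] = new CompositeBlock(partialBlocks);

// Receive side (from_partial): unwrap the composite block and feed it back to the delegate.
CompositeBlock composite = page.getBlock(channels.get(0));
delegate.addIntermediateInput(positionOffset, groupIdVector, composite.asPage());
```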
Relates #109979 --- .../xpack/esql/core/type/DataType.java | 3 +- .../FromPartialAggregatorFunction.java | 99 ++++++++++ ...FromPartialGroupingAggregatorFunction.java | 105 ++++++++++ .../aggregation/IntermediateStateDesc.java | 6 +- .../ToPartialAggregatorFunction.java | 84 ++++++++ .../ToPartialGroupingAggregatorFunction.java | 111 +++++++++++ .../compute/data/CompositeBlock.java | 4 + .../src/main/resources/k8s-metrics.csv-spec | 37 ++++ .../xpack/esql/action/TimeSeriesIT.java | 116 ++++++++++- .../function/aggregate/AggregateFunction.java | 13 +- .../function/aggregate/FromPartial.java | 162 +++++++++++++++ .../expression/function/aggregate/Rate.java | 5 + .../function/aggregate/ToPartial.java | 186 ++++++++++++++++++ .../rules/TranslateMetricsAggregate.java | 94 ++++++--- .../esql/plan/physical/EstimatesRowSize.java | 2 +- .../AbstractPhysicalOperationProviders.java | 11 +- .../xpack/esql/planner/AggregateMapper.java | 30 ++- .../esql/planner/LocalExecutionPlanner.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 1 + .../xpack/esql/type/EsqlDataTypes.java | 2 + .../esql/action/EsqlQueryResponseTests.java | 2 +- .../optimizer/LogicalPlanOptimizerTests.java | 113 +++++++++++ 22 files changed, 1130 insertions(+), 58 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index a8ef1ea689878..0b43d517b8f1e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -68,7 +68,8 @@ public enum DataType { GEO_SHAPE(builder().esType("geo_shape").unknownSize().docValues()), DOC_DATA_TYPE(builder().esType("_doc").size(Integer.BYTES * 3)), - TSID_DATA_TYPE(builder().esType("_tsid").unknownSize().docValues()); + TSID_DATA_TYPE(builder().esType("_tsid").unknownSize().docValues()), + PARTIAL_AGG(builder().esType("partial_agg").unknownSize()); private final String typeName; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java new file mode 100644 index 0000000000000..8d4ce2971d34b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.CompositeBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; + +import java.util.List; + +/** + * @see ToPartialGroupingAggregatorFunction + */ +public class FromPartialAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("partial", ElementType.COMPOSITE, "partial_agg") + ); + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + private final DriverContext driverContext; + private final GroupingAggregatorFunction groupingAggregator; + private final int inputChannel; + private boolean receivedInput = false; + + public FromPartialAggregatorFunction(DriverContext driverContext, GroupingAggregatorFunction groupingAggregator, int inputChannel) { + this.driverContext = driverContext; + this.groupingAggregator = groupingAggregator; + this.inputChannel = inputChannel; + } + + @Override + public void addRawInput(Page page) { + addIntermediateInput(page); + } + + @Override + public void addIntermediateInput(Page page) { + try (IntVector groupIds = driverContext.blockFactory().newConstantIntVector(0, page.getPositionCount())) { + if (page.getPositionCount() > 0) { + receivedInput = true; + } + final CompositeBlock inputBlock = page.getBlock(inputChannel); + groupingAggregator.addIntermediateInput(0, groupIds, inputBlock.asPage()); + } + } + + private IntVector outputPositions() { + return driverContext.blockFactory().newConstantIntVector(0, receivedInput ? 
1 : 0); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + final Block[] partialBlocks = new Block[groupingAggregator.intermediateBlockCount()]; + boolean success = false; + try (IntVector selected = outputPositions()) { + groupingAggregator.evaluateIntermediate(partialBlocks, 0, selected); + blocks[offset] = new CompositeBlock(partialBlocks); + success = true; + } finally { + if (success == false) { + Releasables.close(partialBlocks); + } + } + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + try (IntVector selected = outputPositions()) { + groupingAggregator.evaluateFinal(blocks, offset, selected, driverContext); + } + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void close() { + Releasables.close(groupingAggregator); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "[" + "channel=" + inputChannel + ",delegate=" + groupingAggregator + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..675fbe88f1984 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialGroupingAggregatorFunction.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.CompositeBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; + +import java.util.List; + +/** + * @see ToPartialGroupingAggregatorFunction + */ +public class FromPartialGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("partial", ElementType.COMPOSITE, "partial_agg") + ); + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + private final GroupingAggregatorFunction delegate; + private final int inputChannel; + + public FromPartialGroupingAggregatorFunction(GroupingAggregatorFunction delegate, int inputChannel) { + this.delegate = delegate; + this.inputChannel = inputChannel; + } + + @Override + public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { + return new AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + assert false : "Intermediate group id must not have nulls"; + throw new IllegalStateException("Intermediate group id must not have nulls"); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addIntermediateInput(positionOffset, groupIds, page); + } + }; + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groupIdVector, Page page) { + final CompositeBlock inputBlock = page.getBlock(inputChannel); + delegate.addIntermediateInput(positionOffset, groupIdVector, inputBlock.asPage()); + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input instanceof FromPartialGroupingAggregatorFunction toPartial) { + input = toPartial.delegate; + } + delegate.addIntermediateRowInput(groupId, input, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + Block[] partialBlocks = new Block[delegate.intermediateBlockCount()]; + boolean success = false; + try { + delegate.evaluateIntermediate(partialBlocks, 0, selected); + blocks[offset] = new CompositeBlock(partialBlocks); + success = true; + } finally { + if (success == false) { + Releasables.close(partialBlocks); + } + } + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + delegate.evaluateFinal(blocks, offset, selected, driverContext); + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void close() { + Releasables.close(delegate); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "[" + "channel=" + inputChannel + ",delegate=" + delegate + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/IntermediateStateDesc.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/IntermediateStateDesc.java index 22766c36953c4..ced3ffaef970e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/IntermediateStateDesc.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/IntermediateStateDesc.java @@ -10,4 +10,8 @@ import org.elasticsearch.compute.data.ElementType; /** Intermediate aggregation state descriptor. Intermediate state is a list of these. */ -public record IntermediateStateDesc(String name, ElementType type) {} +public record IntermediateStateDesc(String name, ElementType type, String dataType) { + public IntermediateStateDesc(String name, ElementType type) { + this(name, type, ""); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java new file mode 100644 index 0000000000000..2083b8ebbfff2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.CompositeBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; + +import java.util.List; + +/** + * @see ToPartialGroupingAggregatorFunction + */ +public class ToPartialAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("partial", ElementType.COMPOSITE, "partial_agg") + ); + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + private final AggregatorFunction delegate; + private final List channels; + + public ToPartialAggregatorFunction(AggregatorFunction delegate, List channels) { + this.delegate = delegate; + this.channels = channels; + } + + @Override + public void addRawInput(Page page) { + delegate.addRawInput(page); + } + + @Override + public void addIntermediateInput(Page page) { + final CompositeBlock inputBlock = page.getBlock(channels.get(0)); + delegate.addIntermediateInput(inputBlock.asPage()); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + final Block[] partialBlocks = new Block[delegate.intermediateBlockCount()]; + boolean success = false; + try { + delegate.evaluateIntermediate(partialBlocks, 0, driverContext); + blocks[offset] = new CompositeBlock(partialBlocks); + success = true; + } finally { + if (success == false) { + Releasables.close(partialBlocks); + } + } + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + evaluateIntermediate(blocks, offset, driverContext); + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void close() { + Releasables.close(delegate); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "[" + "channels=" + channels + ",delegate=" + delegate + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..13d4bd5d6c0d6 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialGroupingAggregatorFunction.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.CompositeBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasables; + +import java.util.List; + +/** + * An internal aggregate function that always emits intermediate (or partial) output regardless of the aggregate mode. + * The intermediate output should be consumed by {@link FromPartialGroupingAggregatorFunction}, which always receives + * the intermediate input. Since an intermediate aggregate output can consist of multiple blocks, we wrap these output + * blocks in a single composite block. The {@link FromPartialGroupingAggregatorFunction} then unwraps this input block + * into multiple primitive blocks and passes them to the delegating GroupingAggregatorFunction. + * Both of these commands yield the same result, except the second plan executes aggregates twice: + *

    + * ```
    + * | ... before
    + * | af(x) BY g
    + * | ... after
    + * ```
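    + * and the equivalent plan that runs the aggregate in two passes: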
    + * ```
    + * | ... before
    + * | $x = to_partial(af(x)) BY g
    + * | from_partial($x, af(_)) BY g
    + * | ...  after
    + * 
    + * ``` + */ +public class ToPartialGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("partial", ElementType.COMPOSITE, "partial_agg") + ); + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + private final GroupingAggregatorFunction delegate; + private final List channels; + + public ToPartialGroupingAggregatorFunction(GroupingAggregatorFunction delegate, List channels) { + this.delegate = delegate; + this.channels = channels; + } + + @Override + public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { + return delegate.prepareProcessPage(seenGroupIds, page); + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groupIdVector, Page page) { + final CompositeBlock inputBlock = page.getBlock(channels.get(0)); + delegate.addIntermediateInput(positionOffset, groupIdVector, inputBlock.asPage()); + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input instanceof ToPartialGroupingAggregatorFunction toPartial) { + input = toPartial.delegate; + } + delegate.addIntermediateRowInput(groupId, input, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + final Block[] partialBlocks = new Block[delegate.intermediateBlockCount()]; + boolean success = false; + try { + delegate.evaluateIntermediate(partialBlocks, 0, selected); + blocks[offset] = new CompositeBlock(partialBlocks); + success = true; + } finally { + if (success == false) { + Releasables.close(partialBlocks); + } + } + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + evaluateIntermediate(blocks, offset, selected); + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void close() { + Releasables.close(delegate); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "[" + "channels=" + channels + ",delegate=" + delegate + "]"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java index 8d7b8d57bbaa5..dbbcf905a2e57 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/CompositeBlock.java @@ -58,6 +58,10 @@ public B getBlock(int blockIndex) { return block; } + public Page asPage() { + return new Page(positionCount, blocks); + } + /** * Returns the number of blocks in this composite block. 
*/ diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/k8s-metrics.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/k8s-metrics.csv-spec index 3976329501894..9cfa84df643fa 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/k8s-metrics.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/k8s-metrics.csv-spec @@ -36,6 +36,30 @@ max_cost: double 0.16151685393258428 ; +maxRateAndBytes +required_capability: metrics_syntax +METRICS k8s max(rate(network.total_bytes_in, 1minute)), max(network.bytes_in); + +max(rate(network.total_bytes_in, 1minute)): double | max(network.bytes_in): long +790.4235090751945 | 1021 +; + +`maxRateAndMarkupBytes` +required_capability: metrics_syntax +METRICS k8s max(rate(network.total_bytes_in, 1minute)), max(network.bytes_in * 1.05); + +max(rate(network.total_bytes_in, 1minute)): double | max(network.bytes_in * 1.05): double +790.4235090751945 | 1072.05 +; + +maxRateAndBytesAndCost +required_capability: metrics_syntax +METRICS k8s max(rate(network.total_bytes_in, 1minute)), max(network.bytes_in), max(rate(network.total_cost)); + +max(rate(network.total_bytes_in, 1minute)): double| max(network.bytes_in): long| max(rate(network.total_cost)): double +790.4235090751945 | 1021 | 0.16151685393258428 +; + sumRate required_capability: metrics_syntax METRICS k8s bytes=sum(rate(network.total_bytes_in)), sum(rate(network.total_cost)) BY cluster | SORT cluster; @@ -79,6 +103,19 @@ max(rate(network.total_bytes_in)): double | time_bucket:date | cluster: 9.823232323232324 | 2024-05-10T00:15:00.000Z | staging ; +BytesAndCostByBucketAndCluster +required_capability: metrics_syntax +METRICS k8s max(rate(network.total_bytes_in)), max(network.cost) BY time_bucket = bucket(@timestamp,5minute), cluster | SORT time_bucket DESC, cluster | LIMIT 6; + +max(rate(network.total_bytes_in)): double | max(network.cost): double | time_bucket:date | cluster: keyword +10.594594594594595 | 10.75 | 2024-05-10T00:20:00.000Z | prod +5.586206896551724 | 11.875 | 2024-05-10T00:20:00.000Z | qa +5.37037037037037 | 9.5 | 2024-05-10T00:20:00.000Z | staging +15.913978494623656 | 12.375 | 2024-05-10T00:15:00.000Z | prod +23.702205882352942 | 12.125 | 2024-05-10T00:15:00.000Z | qa +9.823232323232324 | 11.5 | 2024-05-10T00:15:00.000Z | staging +; + oneRateWithBucketAndClusterThenFilter required_capability: metrics_syntax METRICS k8s max(rate(network.total_bytes_in)) BY time_bucket = bucket(@timestamp,5minute), cluster | WHERE cluster=="prod" | SORT time_bucket DESC | LIMIT 3; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index d7c15ad07e350..2ee6ef57e6571 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -249,6 +249,28 @@ record RateKey(String cluster, String host) { assertThat((double) values.get(0).get(0), closeTo(avg, 0.1)); assertThat((double) values.get(0).get(1), closeTo(avg, 0.1)); } + try (var resp = run("METRICS hosts max(rate(request_count)), min(rate(request_count)), min(cpu), max(cpu)")) { + assertThat( + resp.columns(), + equalTo( + List.of( + new ColumnInfo("max(rate(request_count))", "double"), + new ColumnInfo("min(rate(request_count))", "double"), + new ColumnInfo("min(cpu)", "double"), + new 
ColumnInfo("max(cpu)", "double") + ) + ) + ); + List> values = EsqlTestUtils.getValuesList(resp); + assertThat(values, hasSize(1)); + assertThat(values.get(0), hasSize(4)); + assertThat((double) values.get(0).get(0), closeTo(rates.stream().mapToDouble(d -> d).max().orElse(0.0), 0.1)); + assertThat((double) values.get(0).get(1), closeTo(rates.stream().mapToDouble(d -> d).min().orElse(0.0), 0.1)); + double minCpu = docs.stream().mapToDouble(d -> d.cpu).min().orElse(Long.MAX_VALUE); + double maxCpu = docs.stream().mapToDouble(d -> d.cpu).max().orElse(Long.MIN_VALUE); + assertThat((double) values.get(0).get(2), closeTo(minCpu, 0.1)); + assertThat((double) values.get(0).get(3), closeTo(maxCpu, 0.1)); + } } public void testRateGroupedByCluster() { @@ -436,16 +458,21 @@ record RateKey(String host, String cluster, long interval) {} groups.computeIfAbsent(key, k -> new ArrayList<>()).add(new RequestCounter(doc.timestamp, doc.requestCount)); } record GroupKey(String cluster, long interval) {} - Map> buckets = new HashMap<>(); + Map> rateBuckets = new HashMap<>(); for (Map.Entry> e : groups.entrySet()) { RateKey key = e.getKey(); - List values = buckets.computeIfAbsent(new GroupKey(key.cluster, key.interval), k -> new ArrayList<>()); + List values = rateBuckets.computeIfAbsent(new GroupKey(key.cluster, key.interval), k -> new ArrayList<>()); Double rate = computeRate(e.getValue()); if (rate != null) { values.add(rate); } } - List sortedKeys = buckets.keySet() + Map> cpuBuckets = new HashMap<>(); + for (Doc doc : docs) { + GroupKey key = new GroupKey(doc.cluster, rounding.round(doc.timestamp)); + cpuBuckets.computeIfAbsent(key, k -> new ArrayList<>()).add(doc.cpu); + } + List sortedKeys = rateBuckets.keySet() .stream() .sorted(Comparator.comparing(GroupKey::interval).thenComparing(GroupKey::cluster)) .limit(5) @@ -472,7 +499,7 @@ METRICS hosts sum(rate(request_count)) BY ts=bucket(@timestamp, 1 minute), clust var key = sortedKeys.get(i); assertThat(row.get(1), equalTo(DEFAULT_DATE_TIME_FORMATTER.formatMillis(key.interval))); assertThat(row.get(2), equalTo(key.cluster)); - List bucketValues = buckets.get(key); + List bucketValues = rateBuckets.get(key); if (bucketValues.isEmpty()) { assertNull(row.get(0)); } else { @@ -502,7 +529,7 @@ METRICS hosts avg(rate(request_count)) BY ts=bucket(@timestamp, 1minute), cluste var key = sortedKeys.get(i); assertThat(row.get(1), equalTo(DEFAULT_DATE_TIME_FORMATTER.formatMillis(key.interval))); assertThat(row.get(2), equalTo(key.cluster)); - List bucketValues = buckets.get(key); + List bucketValues = rateBuckets.get(key); if (bucketValues.isEmpty()) { assertNull(row.get(0)); } else { @@ -534,7 +561,7 @@ METRICS hosts avg(rate(request_count, 1minute)), avg(rate(request_count)) BY ts= var key = sortedKeys.get(i); assertThat(row.get(2), equalTo(DEFAULT_DATE_TIME_FORMATTER.formatMillis(key.interval))); assertThat(row.get(3), equalTo(key.cluster)); - List bucketValues = buckets.get(key); + List bucketValues = rateBuckets.get(key); if (bucketValues.isEmpty()) { assertNull(row.get(0)); assertNull(row.get(1)); @@ -577,7 +604,7 @@ METRICS hosts avg(rate(request_count, 1minute)), avg(rate(request_count)) BY ts= var key = sortedKeys.get(i); assertThat(row.get(3), equalTo(DEFAULT_DATE_TIME_FORMATTER.formatMillis(key.interval))); assertThat(row.get(4), equalTo(key.cluster)); - List bucketValues = buckets.get(key); + List bucketValues = rateBuckets.get(key); if (bucketValues.isEmpty()) { assertNull(row.get(0)); assertNull(row.get(1)); @@ -590,6 +617,81 @@ METRICS hosts 
avg(rate(request_count, 1minute)), avg(rate(request_count)) BY ts= assertEquals(row.get(0), row.get(2)); } } + try (var resp = run(""" + METRICS hosts sum(rate(request_count)), max(cpu) BY ts=bucket(@timestamp, 1 minute), cluster + | SORT ts, cluster + | LIMIT 5""")) { + assertThat( + resp.columns(), + equalTo( + List.of( + new ColumnInfo("sum(rate(request_count))", "double"), + new ColumnInfo("max(cpu)", "double"), + new ColumnInfo("ts", "date"), + new ColumnInfo("cluster", "keyword") + ) + ) + ); + List> values = EsqlTestUtils.getValuesList(resp); + assertThat(values, hasSize(sortedKeys.size())); + for (int i = 0; i < sortedKeys.size(); i++) { + List row = values.get(i); + assertThat(row, hasSize(4)); + var key = sortedKeys.get(i); + assertThat(row.get(2), equalTo(DEFAULT_DATE_TIME_FORMATTER.formatMillis(key.interval))); + assertThat(row.get(3), equalTo(key.cluster)); + List rateBucket = rateBuckets.get(key); + if (rateBucket.isEmpty()) { + assertNull(row.get(0)); + } else { + assertThat((double) row.get(0), closeTo(rateBucket.stream().mapToDouble(d -> d).sum(), 0.1)); + } + List cpuBucket = cpuBuckets.get(key); + if (cpuBuckets.isEmpty()) { + assertNull(row.get(1)); + } else { + assertThat((double) row.get(1), closeTo(cpuBucket.stream().mapToDouble(d -> d).max().orElse(0.0), 0.1)); + } + } + } + try (var resp = run(""" + METRICS hosts sum(rate(request_count)), avg(cpu) BY ts=bucket(@timestamp, 1 minute), cluster + | SORT ts, cluster + | LIMIT 5""")) { + assertThat( + resp.columns(), + equalTo( + List.of( + new ColumnInfo("sum(rate(request_count))", "double"), + new ColumnInfo("avg(cpu)", "double"), + new ColumnInfo("ts", "date"), + new ColumnInfo("cluster", "keyword") + ) + ) + ); + List> values = EsqlTestUtils.getValuesList(resp); + assertThat(values, hasSize(sortedKeys.size())); + for (int i = 0; i < sortedKeys.size(); i++) { + List row = values.get(i); + assertThat(row, hasSize(4)); + var key = sortedKeys.get(i); + assertThat(row.get(2), equalTo(DEFAULT_DATE_TIME_FORMATTER.formatMillis(key.interval))); + assertThat(row.get(3), equalTo(key.cluster)); + List rateBucket = rateBuckets.get(key); + if (rateBucket.isEmpty()) { + assertNull(row.get(0)); + } else { + assertThat((double) row.get(0), closeTo(rateBucket.stream().mapToDouble(d -> d).sum(), 0.1)); + } + List cpuBucket = cpuBuckets.get(key); + if (cpuBuckets.isEmpty()) { + assertNull(row.get(1)); + } else { + double avg = cpuBucket.stream().mapToDouble(d -> d).sum() / cpuBucket.size(); + assertThat((double) row.get(1), closeTo(avg, 0.1)); + } + } + } } public void testApplyRateBeforeFinalGrouping() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index 7c4c8d63ea96f..0e355e064a788 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -43,7 +43,10 @@ public static List getNamedWriteables() { Sum.ENTRY, TopList.ENTRY, Values.ENTRY, - Rate.ENTRY + Rate.ENTRY, + // internal functions + ToPartial.ENTRY, + FromPartial.ENTRY ); } @@ -78,6 +81,14 @@ public List parameters() { return parameters; } + /** + * Returns the input expressions used in aggregation. + * Defaults to a list containing the only the input field. 
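    + * Rate, for example, overrides this to include its timestamp input in addition to the field.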
+ */ + public List inputExpressions() { + return List.of(field); + } + @Override protected TypeResolution resolveType() { return TypeResolutions.isExact(field, sourceText(), DEFAULT); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java new file mode 100644 index 0000000000000..d7d6237c564c3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.aggregation.Aggregator; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.FromPartialAggregatorFunction; +import org.elasticsearch.compute.aggregation.FromPartialGroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.io.IOException; +import java.util.List; +import java.util.stream.IntStream; + +/** + * @see ToPartial + */ +public class FromPartial extends AggregateFunction implements ToAggregator { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "FromPartial", + FromPartial::new + ); + + private final Expression function; + + public FromPartial(Source source, Expression field, Expression function) { + super(source, field, List.of(function)); + this.function = function; + } + + private FromPartial(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + PlanStreamOutput planOut = (PlanStreamOutput) out; + planOut.writeExpression(function); + } + + public Expression function() { + return function; + } + + @Override + public DataType dataType() { + return function.dataType(); + } + + @Override + protected TypeResolution resolveType() { + return TypeResolution.TYPE_RESOLVED; + } + + @Override + public AttributeSet references() { + return field().references(); // exclude the function and its argument + } + + @Override + 
public Expression replaceChildren(List newChildren) { + return new FromPartial(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, FromPartial::new, field(), function); + } + + @Override + public AggregatorFunctionSupplier supplier(List inputChannels) { + final ToAggregator toAggregator = (ToAggregator) function; + if (inputChannels.size() != 1) { + assert false : "from_partial aggregation requires exactly one input channel; got " + inputChannels; + throw new IllegalArgumentException("from_partial aggregation requires exactly one input channel; got " + inputChannels); + } + final int inputChannel = inputChannels.get(0); + return new AggregatorFunctionSupplier() { + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + assert false : "aggregatorFactory() is override"; + throw new UnsupportedOperationException(); + } + + @Override + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + assert false : "groupingAggregatorFactory() is override"; + throw new UnsupportedOperationException(); + } + + @Override + public Aggregator.Factory aggregatorFactory(AggregatorMode mode) { + final AggregatorFunctionSupplier supplier; + try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { + var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); + supplier = toAggregator.supplier(intermediateChannels); + } + return new Aggregator.Factory() { + @Override + public Aggregator apply(DriverContext driverContext) { + // use groupingAggregator since we can receive intermediate output from a grouping aggregate + final var groupingAggregator = supplier.groupingAggregator(driverContext); + return new Aggregator(new FromPartialAggregatorFunction(driverContext, groupingAggregator, inputChannel), mode); + } + + @Override + public String describe() { + return "from_partial(" + supplier.describe() + ")"; + } + }; + } + + @Override + public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { + final AggregatorFunctionSupplier supplier; + try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { + var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); + supplier = toAggregator.supplier(intermediateChannels); + } + return new GroupingAggregator.Factory() { + @Override + public GroupingAggregator apply(DriverContext driverContext) { + final GroupingAggregatorFunction aggregator = supplier.groupingAggregator(driverContext); + return new GroupingAggregator(new FromPartialGroupingAggregatorFunction(aggregator, inputChannel), mode); + } + + @Override + public String describe() { + return "from_partial(" + supplier.describe() + ")"; + } + }; + } + + @Override + public String describe() { + return "from_partial"; + } + }; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index 3d38c66119ead..6da6d42f2a8f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -147,6 +147,11 @@ long unitInMillis() { throw new IllegalArgumentException("function [" + sourceText() + "] has 
invalid unit [" + unit.sourceText() + "]"); } + @Override + public List inputExpressions() { + return List.of(field(), timestamp); + } + @Override public AggregatorFunctionSupplier supplier(List inputChannels) { if (inputChannels.size() != 2 && inputChannels.size() != 3) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java new file mode 100644 index 0000000000000..805985b5302cf --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -0,0 +1,186 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.aggregation.Aggregator; +import org.elasticsearch.compute.aggregation.AggregatorFunction; +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.FromPartialGroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.aggregation.ToPartialAggregatorFunction; +import org.elasticsearch.compute.aggregation.ToPartialGroupingAggregatorFunction; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.planner.ToAggregator; + +import java.io.IOException; +import java.util.List; +import java.util.stream.IntStream; + +/** + * An internal aggregate function that always emits intermediate (or partial) output regardless + * of the aggregate mode. The intermediate output should be consumed by {@link FromPartial}, + * which always receives the intermediate input. Since an intermediate aggregate output can + * consist of multiple blocks, we wrap these output blocks in a single composite block. + * The {@link FromPartial} then unwraps this input block into multiple primitive blocks and + * passes them to the delegating GroupingAggregatorFunction. + *

    + * Both of these commands yield the same result, except the second plan executes aggregates twice: + *

    + * ```
    + * | ... before
    + * | af(x) BY g
    + * | ... after
    + * ```
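    + * and the two-pass form produced by to_partial/from_partial: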
    + * ```
    + * | ... before
    + * | $x = to_partial(af(x)) BY g
    + * | from_partial($x, af(_)) BY g
    + * | ...  after
    + * 
    + * ``` + * @see ToPartialGroupingAggregatorFunction + * @see FromPartialGroupingAggregatorFunction + */ +public class ToPartial extends AggregateFunction implements ToAggregator { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "ToPartial", + ToPartial::new + ); + + private final Expression function; + + public ToPartial(Source source, AggregateFunction function) { + super(source, function.field(), List.of(function)); + this.function = function; + } + + private ToPartial(Source source, Expression field, Expression function) { + super(source, field, List.of(function)); + this.function = function; + } + + private ToPartial(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + PlanStreamOutput planOut = (PlanStreamOutput) out; + planOut.writeExpression(function); + } + + public Expression function() { + return function; + } + + @Override + public DataType dataType() { + return DataType.PARTIAL_AGG; + } + + @Override + protected TypeResolution resolveType() { + return function.typeResolved(); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToPartial(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToPartial::new, field(), function); + } + + @Override + public AggregatorFunctionSupplier supplier(List inputChannels) { + final ToAggregator toAggregator = (ToAggregator) function; + return new AggregatorFunctionSupplier() { + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + assert false : "aggregatorFactory() is override"; + throw new UnsupportedOperationException(); + } + + @Override + public GroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + assert false : "groupingAggregatorFactory() is override"; + throw new UnsupportedOperationException(); + } + + @Override + public Aggregator.Factory aggregatorFactory(AggregatorMode mode) { + final AggregatorFunctionSupplier supplier; + if (mode.isInputPartial()) { + try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { + var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); + supplier = toAggregator.supplier(intermediateChannels); + } + } else { + supplier = toAggregator.supplier(inputChannels); + } + return new Aggregator.Factory() { + @Override + public Aggregator apply(DriverContext driverContext) { + final AggregatorFunction aggregatorFunction = supplier.aggregator(driverContext); + return new Aggregator(new ToPartialAggregatorFunction(aggregatorFunction, inputChannels), mode); + } + + @Override + public String describe() { + return "to_partial(" + supplier.describe() + ")"; + } + }; + } + + @Override + public GroupingAggregator.Factory groupingAggregatorFactory(AggregatorMode mode) { + final AggregatorFunctionSupplier supplier; + if (mode.isInputPartial()) { + try (var dummy = toAggregator.supplier(inputChannels).aggregator(DriverContext.getLocalDriver())) { + var intermediateChannels = IntStream.range(0, dummy.intermediateBlockCount()).boxed().toList(); + supplier = toAggregator.supplier(intermediateChannels); + } + } else { + supplier = toAggregator.supplier(inputChannels); + } + return new 
GroupingAggregator.Factory() { + @Override + public GroupingAggregator apply(DriverContext driverContext) { + final GroupingAggregatorFunction aggregatorFunction = supplier.groupingAggregator(driverContext); + return new GroupingAggregator(new ToPartialGroupingAggregatorFunction(aggregatorFunction, inputChannels), mode); + } + + @Override + public String describe() { + return "to_partial(" + supplier.describe() + ")"; + } + }; + } + + @Override + public String describe() { + return "to_partial"; + } + }; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java index 0a62dccf80c1f..88486bcb864dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java @@ -20,7 +20,9 @@ import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.function.aggregate.FromPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; +import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -80,7 +82,39 @@ * | EVAL `avg(rate(request))` = `sum(rate(request))` / `count(rate(request))` * | KEEP `avg(rate(request))`, host, `bucket(@timestamp, 1minute)` * - * Mixing between rate and non-rate aggregates will be supported later. + * + * Non-rate aggregates will be rewritten as a pair of to_partial and from_partial aggregates, where the `to_partial` + * aggregates will be executed in the first pass and always produce an intermediate output regardless of the aggregate + * mode. The `from_partial` aggregates will be executed on the second pass and always receive intermediate output + * produced by `to_partial`. Examples: + * + *
    + * METRICS k8s max(rate(request)), max(memory_used) becomes:
    + *
    + * METRICS k8s
    + * | STATS rate(request), $p1=to_partial(max(memory_used)) BY _tsid
    + * | STATS max(`rate(request)`), `max(memory_used)` = from_partial($p1, max($_))
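    + * (max(memory_used) is computed as partial state per _tsid in the first STATS, then merged by from_partial in the second)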
    + *
    + * METRICS k8s max(rate(request)), avg(memory_used) BY host
    + *
    + * becomes
    + *
    + * METRICS k8s
    + * | STATS rate(request), $p1=to_partial(sum(memory_used)), $p2=to_partial(count(memory_used)), VALUES(host) BY _tsid
    + * | STATS max(`rate(request)`), $sum=from_partial($p1, sum($_)), $count=from_partial($p2, count($_)) BY host=`VALUES(host)`
    + * | EVAL `avg(memory_used)` = $sum / $count
    + * | KEEP `max(rate(request))`, `avg(memory_used)`, host
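    + * (avg is decomposed into sum and count partials; the EVAL recombines them into the average)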
    + *
    + * METRICS k8s min(memory_used), sum(rate(request)) BY pod, bucket(@timestamp, 5m)
    + *
    + * becomes
    + *
    + * METRICS k8s
    + * | EVAL `bucket(@timestamp, 5m)` = datetrunc(@timestamp, '5m')
    + * | STATS rate(request), $p1=to_partial(min(memory_used)), VALUES(pod) BY _tsid, `bucket(@timestamp, 5m)`
    + * | STATS sum(`rate(request)`), `min(memory_used)` = from_partial($p1, min($_)) BY pod=`VALUES(pod)`, `bucket(@timestamp, 5m)`
    + * | KEEP `min(memory_used)`, `sum(rate(request))`, pod, `bucket(@timestamp, 5m)`
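    + * (grouping keys other than the time bucket, such as pod, are carried through the first pass via VALUES)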
    + * 
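    + * The $-prefixed names above ($p1, $p2, $_) are illustrative placeholders, not the exact attribute names the rule generates.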
    */ public final class TranslateMetricsAggregate extends OptimizerRules.OptimizerRule { @@ -98,34 +132,34 @@ protected LogicalPlan rule(Aggregate aggregate) { } LogicalPlan translate(Aggregate metrics) { - Map rateAggs = new HashMap<>(); // TODO - List nonRateAggs = new ArrayList<>(); - List outerRateAggs = new ArrayList<>(); + Map rateAggs = new HashMap<>(); + List firstPassAggs = new ArrayList<>(); + List secondPassAggs = new ArrayList<>(); for (NamedExpression agg : metrics.aggregates()) { - if (agg instanceof Alias alias) { - // METRICS af(rate(counter)) becomes STATS $rate_1=rate(counter) | STATS `af(rate(counter))`=af($rate_1) - if (alias.child() instanceof AggregateFunction outerRate) { - Holder changed = new Holder<>(Boolean.FALSE); - Expression outerAgg = outerRate.transformDown(Rate.class, rate -> { - changed.set(Boolean.TRUE); - Alias rateAgg = rateAggs.computeIfAbsent(rate, k -> new Alias(rate.source(), agg.name(), rate)); - return rateAgg.toAttribute(); + if (agg instanceof Alias alias && alias.child() instanceof AggregateFunction af) { + Holder changed = new Holder<>(Boolean.FALSE); + Expression outerAgg = af.transformDown(Rate.class, rate -> { + changed.set(Boolean.TRUE); + Alias rateAgg = rateAggs.computeIfAbsent(rate, k -> { + Alias newRateAgg = new Alias(rate.source(), agg.name(), rate); + firstPassAggs.add(newRateAgg); + return newRateAgg; }); - if (changed.get()) { - outerRateAggs.add(new Alias(alias.source(), alias.name(), null, outerAgg, agg.id())); - } + return rateAgg.toAttribute(); + }); + if (changed.get()) { + secondPassAggs.add(new Alias(alias.source(), alias.name(), null, outerAgg, agg.id())); } else { - nonRateAggs.add(agg); + var toPartial = new Alias(agg.source(), alias.name(), new ToPartial(agg.source(), af)); + var fromPartial = new FromPartial(agg.source(), toPartial.toAttribute(), af); + firstPassAggs.add(toPartial); + secondPassAggs.add(new Alias(alias.source(), alias.name(), null, fromPartial, alias.id())); } } } if (rateAggs.isEmpty()) { return toStandardAggregate(metrics); } - if (nonRateAggs.isEmpty() == false) { - // TODO: support this - throw new IllegalArgumentException("regular aggregates with rate aggregates are not supported yet"); - } Holder tsid = new Holder<>(); Holder timestamp = new Holder<>(); metrics.forEachDown(EsRelation.class, r -> { @@ -142,9 +176,9 @@ LogicalPlan translate(Aggregate metrics) { throw new IllegalArgumentException("_tsid or @timestamp field are missing from the metrics source"); } // metrics aggregates must be grouped by _tsid (and time-bucket) first and re-group by users key - List initialGroupings = new ArrayList<>(); - initialGroupings.add(tsid.get()); - List finalGroupings = new ArrayList<>(); + List firstPassGroupings = new ArrayList<>(); + firstPassGroupings.add(tsid.get()); + List secondPassGroupings = new ArrayList<>(); Holder timeBucketRef = new Holder<>(); metrics.child().forEachExpressionUp(NamedExpression.class, e -> { for (Expression child : e.children()) { @@ -157,7 +191,6 @@ LogicalPlan translate(Aggregate metrics) { } }); NamedExpression timeBucket = timeBucketRef.get(); - List initialAggs = new ArrayList<>(rateAggs.values()); for (Expression group : metrics.groupings()) { if (group instanceof Attribute == false) { throw new EsqlIllegalArgumentException("expected named expression for grouping; got " + group); @@ -166,19 +199,18 @@ LogicalPlan translate(Aggregate metrics) { final NamedExpression newFinalGroup; if (timeBucket != null && g.id().equals(timeBucket.id())) { newFinalGroup = 
timeBucket.toAttribute(); - initialGroupings.add(newFinalGroup); + firstPassGroupings.add(newFinalGroup); } else { newFinalGroup = new Alias(g.source(), g.name(), null, new Values(g.source(), g), g.id()); - initialAggs.add(newFinalGroup); + firstPassAggs.add(newFinalGroup); } - finalGroupings.add(new Alias(g.source(), g.name(), null, newFinalGroup.toAttribute(), g.id())); + secondPassGroupings.add(new Alias(g.source(), g.name(), null, newFinalGroup.toAttribute(), g.id())); } - var finalAggregates = Stream.concat(outerRateAggs.stream(), nonRateAggs.stream()).toList(); return newAggregate( - newAggregate(metrics.child(), Aggregate.AggregateType.METRICS, initialAggs, initialGroupings), + newAggregate(metrics.child(), Aggregate.AggregateType.METRICS, firstPassAggs, firstPassGroupings), Aggregate.AggregateType.STANDARD, - finalAggregates, - finalGroupings + secondPassAggs, + secondPassGroupings ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java index 8b9b5398b3cec..40c9067efbeda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java @@ -119,7 +119,7 @@ static int estimateSize(DataType dataType) { case LONG -> Long.BYTES; case NULL -> 0; // TODO: provide a specific estimate for aggregated_metrics_double - case COMPOSITE -> throw new EsqlIllegalArgumentException("can't estimate size for composite blocks"); + case COMPOSITE -> 50; case UNKNOWN -> throw new EsqlIllegalArgumentException("[unknown] can't be the result of field extraction"); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 38695748632de..3971e79cdc6d9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -257,16 +257,7 @@ private void aggregatesToFactory( ); } } else { - List inputExpressions = new ArrayList<>(); - inputExpressions.add(field); - for (Expression param : aggregateFunction.parameters()) { - if (param.foldable() == false) { - inputExpressions.add(param); - } else { - Object ignored = param.fold(); - } - } - sourceAttr = inputExpressions.stream().map(e -> { + sourceAttr = aggregateFunction.inputExpressions().stream().map(e -> { Attribute attr = Expressions.attribute(e); if (attr == null) { throw new EsqlIllegalArgumentException( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 22ee8074fdb3a..55a691a165d56 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.planner; +import org.elasticsearch.common.Strings; import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; @@ -24,6 +25,7 @@ 
import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; +import org.elasticsearch.xpack.esql.expression.function.aggregate.FromPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; @@ -33,6 +35,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.TopList; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; @@ -65,7 +68,11 @@ final class AggregateMapper { Sum.class, Values.class, TopList.class, - Rate.class + Rate.class, + + // internal function + FromPartial.class, + ToPartial.class ); /** Record of agg Class, type, and grouping (or non-grouping). */ @@ -151,9 +158,13 @@ private static Stream, Tuple>> typeAndNames(Class types = List.of("Int", "Long", "Double"); } else if (Rate.class.isAssignableFrom(clazz)) { types = List.of("Int", "Long", "Double"); - } else { - assert clazz == CountDistinct.class : "Expected CountDistinct, got: " + clazz; + } else if (FromPartial.class.isAssignableFrom(clazz) || ToPartial.class.isAssignableFrom(clazz)) { + types = List.of(""); // no type + } else if (CountDistinct.class.isAssignableFrom(clazz)) { types = Stream.concat(NUMERIC.stream(), Stream.of("Boolean", "BytesRef")).toList(); + } else { + assert false : "unknown aggregate type " + clazz; + throw new IllegalArgumentException("unknown aggregate type " + clazz); } return combinations(types, extraConfigs).map(combo -> new Tuple<>(clazz, combo)); } @@ -233,7 +244,15 @@ private static String determinePackageName(Class clazz) { /** Maps intermediate state description to named expressions. */ private static Stream isToNE(List intermediateStateDescs) { - return intermediateStateDescs.stream().map(is -> new ReferenceAttribute(Source.EMPTY, is.name(), toDataType(is.type()))); + return intermediateStateDescs.stream().map(is -> { + final DataType dataType; + if (Strings.isEmpty(is.dataType())) { + dataType = toDataType(is.type()); + } else { + dataType = DataType.fromEs(is.dataType()); + } + return new ReferenceAttribute(Source.EMPTY, is.name(), dataType); + }); } /** Returns the data type for the engines element type. 
*/ @@ -254,6 +273,9 @@ private static String dataTypeToString(DataType type, Class aggClass) { if (aggClass == Count.class) { return ""; // no type distinction } + if (aggClass == ToPartial.class || aggClass == FromPartial.class) { + return ""; + } if (type.equals(DataType.BOOLEAN)) { return "Boolean"; } else if (type.equals(DataType.INTEGER) || type.equals(DataType.COUNTER_INTEGER)) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 84c18ed63bb4f..ddf5fa6eaf8a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -353,7 +353,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point - case UNSUPPORTED -> TopNEncoder.UNSUPPORTED; + case PARTIAL_AGG, UNSUPPORTED -> TopNEncoder.UNSUPPORTED; case SOURCE -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 20138f34c1041..a729cec893126 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -253,6 +253,7 @@ public static ElementType toElementType(DataType dataType, MappedFieldType.Field case TSID_DATA_TYPE -> ElementType.BYTES_REF; case GEO_POINT, CARTESIAN_POINT -> fieldExtractPreference == DOC_VALUES ? 
ElementType.LONG : ElementType.BYTES_REF; case GEO_SHAPE, CARTESIAN_SHAPE -> ElementType.BYTES_REF; + case PARTIAL_AGG -> ElementType.COMPOSITE; case SHORT, BYTE, DATE_PERIOD, TIME_DURATION, OBJECT, NESTED, FLOAT, HALF_FLOAT, SCALED_FLOAT -> throw EsqlIllegalArgumentException.illegalDataType(dataType); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index a7d7bb66a4818..8a75d3f379dd3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -18,6 +18,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.NESTED; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; import static org.elasticsearch.xpack.esql.core.type.DataType.OBJECT; +import static org.elasticsearch.xpack.esql.core.type.DataType.PARTIAL_AGG; import static org.elasticsearch.xpack.esql.core.type.DataType.SCALED_FLOAT; import static org.elasticsearch.xpack.esql.core.type.DataType.SHORT; import static org.elasticsearch.xpack.esql.core.type.DataType.SOURCE; @@ -89,6 +90,7 @@ public static boolean isRepresentable(DataType t) { && t != SCALED_FLOAT && t != SOURCE && t != HALF_FLOAT + && t != PARTIAL_AGG && t.isCounter() == false; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index ead0eb9ee0635..4d41218b2165f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -123,7 +123,7 @@ EsqlQueryResponse randomResponseAsync(boolean columnar, EsqlQueryResponse.Profil private ColumnInfo randomColumnInfo() { DataType type = randomValueOtherThanMany( - t -> false == DataType.isPrimitive(t) || t == DataType.DATE_PERIOD || t == DataType.TIME_DURATION, + t -> false == DataType.isPrimitive(t) || t == DataType.DATE_PERIOD || t == DataType.TIME_DURATION || t == DataType.PARTIAL_AGG, () -> randomFrom(DataType.types()) ).widenSmallNumeric(); return new ColumnInfo(randomAlphaOfLength(10), type.esType()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 9611924e9a6bb..05c40ce5bd85f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; +import org.elasticsearch.xpack.esql.expression.function.aggregate.FromPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.Median; import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; @@ -66,6 +67,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; @@ -5167,6 +5169,68 @@ public void testTranslateMetricsWithoutGrouping() { assertThat(Expressions.attribute(rate.field()).name(), equalTo("network.total_bytes_in")); } + public void testTranslateMixedAggsWithoutGrouping() { + assumeTrue("requires snapshot builds", Build.current().isSnapshot()); + var query = "METRICS k8s max(rate(network.total_bytes_in)), max(network.cost)"; + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + Limit limit = as(plan, Limit.class); + Aggregate finalAggs = as(limit.child(), Aggregate.class); + Aggregate aggsByTsid = as(finalAggs.child(), Aggregate.class); + as(aggsByTsid.child(), EsRelation.class); + + assertThat(finalAggs.aggregateType(), equalTo(Aggregate.AggregateType.STANDARD)); + assertThat(finalAggs.aggregates(), hasSize(2)); + Max maxRate = as(Alias.unwrap(finalAggs.aggregates().get(0)), Max.class); + FromPartial maxCost = as(Alias.unwrap(finalAggs.aggregates().get(1)), FromPartial.class); + assertThat(Expressions.attribute(maxRate.field()).id(), equalTo(aggsByTsid.aggregates().get(0).id())); + assertThat(Expressions.attribute(maxCost.field()).id(), equalTo(aggsByTsid.aggregates().get(1).id())); + assertThat(finalAggs.groupings(), empty()); + + assertThat(aggsByTsid.aggregateType(), equalTo(Aggregate.AggregateType.METRICS)); + assertThat(aggsByTsid.aggregates(), hasSize(2)); + Rate rate = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), Rate.class); + assertThat(Expressions.attribute(rate.field()).name(), equalTo("network.total_bytes_in")); + ToPartial toPartialMaxCost = as(Alias.unwrap(aggsByTsid.aggregates().get(1)), ToPartial.class); + assertThat(Expressions.attribute(toPartialMaxCost.field()).name(), equalTo("network.cost")); + } + + public void testTranslateMixedAggsWithMathWithoutGrouping() { + assumeTrue("requires snapshot builds", Build.current().isSnapshot()); + var query = "METRICS k8s max(rate(network.total_bytes_in)), max(network.cost + 0.2) * 1.1"; + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + Project project = as(plan, Project.class); + Eval mulEval = as(project.child(), Eval.class); + assertThat(mulEval.fields(), hasSize(1)); + Mul mul = as(Alias.unwrap(mulEval.fields().get(0)), Mul.class); + Limit limit = as(mulEval.child(), Limit.class); + Aggregate finalAggs = as(limit.child(), Aggregate.class); + assertThat(finalAggs.aggregates(), hasSize(2)); + Aggregate aggsByTsid = as(finalAggs.child(), Aggregate.class); + assertThat(aggsByTsid.aggregates(), hasSize(2)); + Eval addEval = as(aggsByTsid.child(), Eval.class); + assertThat(addEval.fields(), hasSize(1)); + Add add = as(Alias.unwrap(addEval.fields().get(0)), Add.class); + as(addEval.child(), EsRelation.class); + + assertThat(Expressions.attribute(mul.left()).id(), equalTo(finalAggs.aggregates().get(1).id())); + assertThat(mul.right().fold(), equalTo(1.1)); + + assertThat(finalAggs.aggregateType(), equalTo(Aggregate.AggregateType.STANDARD)); + Max maxRate = as(Alias.unwrap(finalAggs.aggregates().get(0)), Max.class); + FromPartial maxCost = 
as(Alias.unwrap(finalAggs.aggregates().get(1)), FromPartial.class); + assertThat(Expressions.attribute(maxRate.field()).id(), equalTo(aggsByTsid.aggregates().get(0).id())); + assertThat(Expressions.attribute(maxCost.field()).id(), equalTo(aggsByTsid.aggregates().get(1).id())); + assertThat(finalAggs.groupings(), empty()); + + assertThat(aggsByTsid.aggregateType(), equalTo(Aggregate.AggregateType.METRICS)); + Rate rate = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), Rate.class); + assertThat(Expressions.attribute(rate.field()).name(), equalTo("network.total_bytes_in")); + ToPartial toPartialMaxCost = as(Alias.unwrap(aggsByTsid.aggregates().get(1)), ToPartial.class); + assertThat(Expressions.attribute(toPartialMaxCost.field()).id(), equalTo(addEval.fields().get(0).id())); + assertThat(Expressions.attribute(add.left()).name(), equalTo("network.cost")); + assertThat(add.right().fold(), equalTo(0.2)); + } + public void testTranslateMetricsGroupedByOneDimension() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = "METRICS k8s sum(rate(network.total_bytes_in)) BY cluster | SORT cluster | LIMIT 10"; @@ -5296,6 +5360,55 @@ METRICS k8s avg(rate(network.total_bytes_in)) BY pod, bucket(@timestamp, 5 minut assertThat(Expressions.attribute(clusterValues.field()).name(), equalTo("cluster")); } + public void testTranslateMixedAggsGroupedByTimeBucketAndDimensions() { + assumeTrue("requires snapshot builds", Build.current().isSnapshot()); + var query = """ + METRICS k8s avg(rate(network.total_bytes_in)), avg(network.cost) BY bucket(@timestamp, 5 minute), cluster + | SORT cluster + | LIMIT 10 + """; + var plan = logicalOptimizer.optimize(metricsAnalyzer.analyze(parser.createStatement(query))); + Project project = as(plan, Project.class); + TopN topN = as(project.child(), TopN.class); + Eval eval = as(topN.child(), Eval.class); + assertThat(eval.fields(), hasSize(2)); + Div div = as(Alias.unwrap(eval.fields().get(0)), Div.class); + Aggregate finalAgg = as(eval.child(), Aggregate.class); + Aggregate aggsByTsid = as(finalAgg.child(), Aggregate.class); + Eval bucket = as(aggsByTsid.child(), Eval.class); + as(bucket.child(), EsRelation.class); + assertThat(Expressions.attribute(div.left()).id(), equalTo(finalAgg.aggregates().get(0).id())); + assertThat(Expressions.attribute(div.right()).id(), equalTo(finalAgg.aggregates().get(1).id())); + + assertThat(finalAgg.aggregateType(), equalTo(Aggregate.AggregateType.STANDARD)); + assertThat(finalAgg.aggregates(), hasSize(6)); // sum, count, sum, count, bucket, cluster + Sum sumRate = as(Alias.unwrap(finalAgg.aggregates().get(0)), Sum.class); + Count countRate = as(Alias.unwrap(finalAgg.aggregates().get(1)), Count.class); + assertThat(Expressions.attribute(sumRate.field()).id(), equalTo(aggsByTsid.aggregates().get(0).id())); + assertThat(Expressions.attribute(countRate.field()).id(), equalTo(aggsByTsid.aggregates().get(0).id())); + + FromPartial sumCost = as(Alias.unwrap(finalAgg.aggregates().get(2)), FromPartial.class); + FromPartial countCost = as(Alias.unwrap(finalAgg.aggregates().get(3)), FromPartial.class); + assertThat(Expressions.attribute(sumCost.field()).id(), equalTo(aggsByTsid.aggregates().get(1).id())); + assertThat(Expressions.attribute(countCost.field()).id(), equalTo(aggsByTsid.aggregates().get(2).id())); + + assertThat(finalAgg.groupings(), hasSize(2)); + assertThat(Expressions.attribute(finalAgg.groupings().get(0)).id(), equalTo(aggsByTsid.aggregates().get(3).id())); + + assertThat(aggsByTsid.aggregateType(), 
equalTo(Aggregate.AggregateType.METRICS)); + assertThat(aggsByTsid.aggregates(), hasSize(5)); // rate, to_partial(sum(cost)), to_partial(count(cost)), values(cluster), bucket + Rate rate = as(Alias.unwrap(aggsByTsid.aggregates().get(0)), Rate.class); + assertThat(Expressions.attribute(rate.field()).name(), equalTo("network.total_bytes_in")); + ToPartial toPartialSum = as(Alias.unwrap(aggsByTsid.aggregates().get(1)), ToPartial.class); + assertThat(toPartialSum.function(), instanceOf(Sum.class)); + assertThat(Expressions.attribute(toPartialSum.field()).name(), equalTo("network.cost")); + ToPartial toPartialCount = as(Alias.unwrap(aggsByTsid.aggregates().get(2)), ToPartial.class); + assertThat(toPartialCount.function(), instanceOf(Count.class)); + assertThat(Expressions.attribute(toPartialCount.field()).name(), equalTo("network.cost")); + Values clusterValues = as(Alias.unwrap(aggsByTsid.aggregates().get(4)), Values.class); + assertThat(Expressions.attribute(clusterValues.field()).name(), equalTo("cluster")); + } + public void testAdjustMetricsRateBeforeFinalAgg() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); var query = """ From 3bc485cafe621b38ab47bf3e283eacb0f4909388 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sun, 30 Jun 2024 16:41:31 +0100 Subject: [PATCH 063/216] AwaitsFix for #110272 --- .../elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 6d84a295889c3..af09b61adaf45 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.compute.data.Page; import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; @@ -49,6 +50,7 @@ import java.util.function.Consumer; import java.util.function.Predicate; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/110272") public class EsqlNodeSubclassTests> extends NodeSubclassTests { private static final List> CLASSES_WITH_MIN_TWO_CHILDREN = List.of(Concat.class, CIDRMatch.class); From b906ce3d66cf3d654cfbb2cb323593e1d26ec1f3 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sun, 30 Jun 2024 20:01:31 +0300 Subject: [PATCH 064/216] ESQL: change from quoting from backtick to quote (#108395) * ESQL: change from quoting from backtick to quote For historical reasons, the source declaration inside FROM command is treated as an identifier, using backticks (`) for escaping the value. This is inconsistent since the source is not an identifier (field name) but an index name which has different semantics. `index` means a field name index while "index" means a literal with said value. In case of FROM, the index name/location is more like a literal (also in unquoted form) than an identifier (that is a reference to a value). This PR tweaks the grammar and plugs in the quoted string logic so that both the single quote (") and triple quote (""") are allowed. 
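
A sketch of the resulting syntax (index names here are made-up examples):

  FROM "my-index"
  FROM """my-index"""
  FROM logs-*, "another-index"

The unquoted form keeps working alongside both quoted forms.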
* Update grammar * Add more tests * Add a few more tests * Add extra test * Update docs/changelog/108395.yaml * Adress review comments * Add doc note * Revert test rename * Fix quoting with remote cluster * Update docs/reference/esql/source-commands/from.asciidoc Co-authored-by: marciw <333176+marciw@users.noreply.github.com> --------- Co-authored-by: Bogdan Pintea Co-authored-by: Bogdan Pintea Co-authored-by: marciw <333176+marciw@users.noreply.github.com> Co-authored-by: Elastic Machine --- docs/changelog/108395.yaml | 5 + .../esql/source-commands/from.asciidoc | 8 + .../src/main/resources/k8s-metrics.csv-spec | 10 +- .../src/main/resources/lookup.csv-spec | 8 +- .../src/main/resources/string.csv-spec | 10 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 41 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 2 +- .../esql/src/main/antlr/EsqlBaseParser.g4 | 22 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 2 +- .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/parser/AbstractBuilder.java | 27 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 22 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1786 +++++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 8 +- .../xpack/esql/parser/EsqlBaseParser.java | 1725 ++++++++-------- .../parser/EsqlBaseParserBaseListener.java | 28 +- .../parser/EsqlBaseParserBaseVisitor.java | 16 +- .../esql/parser/EsqlBaseParserListener.java | 28 +- .../esql/parser/EsqlBaseParserVisitor.java | 16 +- .../xpack/esql/parser/ExpressionBuilder.java | 2 +- .../xpack/esql/parser/IdentifierBuilder.java | 27 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 10 +- .../esql/parser/StatementParserTests.java | 146 +- 23 files changed, 2168 insertions(+), 1788 deletions(-) create mode 100644 docs/changelog/108395.yaml diff --git a/docs/changelog/108395.yaml b/docs/changelog/108395.yaml new file mode 100644 index 0000000000000..c33cf169a99fa --- /dev/null +++ b/docs/changelog/108395.yaml @@ -0,0 +1,5 @@ +pr: 108395 +summary: "ESQL: change from quoting from backtick to quote" +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/source-commands/from.asciidoc b/docs/reference/esql/source-commands/from.asciidoc index d81c46530e089..9ab21e8996aa0 100644 --- a/docs/reference/esql/source-commands/from.asciidoc +++ b/docs/reference/esql/source-commands/from.asciidoc @@ -82,3 +82,11 @@ Use the optional `METADATA` directive to enable < channel(HIDDEN) ; -fragment INDEX_UNQUOTED_IDENTIFIER_PART - : ~[=`|,[\]/ \t\r\n] - | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment +// in 8.14 ` were not allowed +// this has been relaxed in 8.15 since " is used for quoting +fragment UNQUOTED_SOURCE_PART + : ~[:"=|,[\]/ \t\r\n] + | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment -- used in index pattern date spec ; -INDEX_UNQUOTED_IDENTIFIER - : INDEX_UNQUOTED_IDENTIFIER_PART+ +UNQUOTED_SOURCE + : UNQUOTED_SOURCE_PART+ ; // @@ -202,15 +204,13 @@ mode FROM_MODE; FROM_PIPE : PIPE -> type(PIPE), popMode; FROM_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET); FROM_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET); +FROM_COLON : COLON -> type(COLON); FROM_COMMA : COMMA -> type(COMMA); FROM_ASSIGN : ASSIGN -> type(ASSIGN); -FROM_QUOTED_STRING : QUOTED_STRING -> type(QUOTED_STRING); - METADATA : 'metadata'; -FROM_INDEX_UNQUOTED_IDENTIFIER - : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER) - ; +FROM_UNQUOTED_SOURCE : UNQUOTED_SOURCE -> type(UNQUOTED_SOURCE); 
+FROM_QUOTED_SOURCE : QUOTED_STRING -> type(QUOTED_STRING); FROM_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -301,10 +301,6 @@ ENRICH_POLICY_NAME : (ENRICH_POLICY_NAME_BODY+ COLON)? ENRICH_POLICY_NAME_BODY+ ; -ENRICH_QUOTED_IDENTIFIER - : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) - ; - ENRICH_MODE_UNQUOTED_VALUE : ENRICH_POLICY_NAME -> type(ENRICH_POLICY_NAME) ; @@ -321,7 +317,7 @@ ENRICH_WS : WS -> channel(HIDDEN) ; -// submode for Enrich to allow different lexing between policy identifier (loose) and field identifiers +// submode for Enrich to allow different lexing between policy source (loose) and field identifiers mode ENRICH_FIELD_MODE; ENRICH_FIELD_PIPE : PIPE -> type(PIPE), popMode, popMode; ENRICH_FIELD_ASSIGN : ASSIGN -> type(ASSIGN); @@ -353,13 +349,13 @@ ENRICH_FIELD_WS // LOOKUP ON key mode LOOKUP_MODE; LOOKUP_PIPE : PIPE -> type(PIPE), popMode; +LOOKUP_COLON : COLON -> type(COLON); LOOKUP_COMMA : COMMA -> type(COMMA); LOOKUP_DOT: DOT -> type(DOT); LOOKUP_ON : ON -> type(ON), pushMode(LOOKUP_FIELD_MODE); -LOOKUP_INDEX_UNQUOTED_IDENTIFIER - : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER) - ; +LOOKUP_UNQUOTED_SOURCE: UNQUOTED_SOURCE -> type(UNQUOTED_SOURCE); +LOOKUP_QUOTED_SOURCE : QUOTED_STRING -> type(QUOTED_STRING); LOOKUP_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -486,9 +482,8 @@ SETTING_WS mode METRICS_MODE; METRICS_PIPE : PIPE -> type(PIPE), popMode; -METRICS_INDEX_UNQUOTED_IDENTIFIER - : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER), popMode, pushMode(CLOSING_METRICS_MODE) - ; +METRICS_UNQUOTED_SOURCE: UNQUOTED_SOURCE -> type(UNQUOTED_SOURCE), popMode, pushMode(CLOSING_METRICS_MODE); +METRICS_QUOTED_SOURCE : QUOTED_STRING -> type(QUOTED_STRING), popMode, pushMode(CLOSING_METRICS_MODE); METRICS_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -505,6 +500,10 @@ METRICS_WS // TODO: remove this workaround mode - see https://github.com/elastic/elasticsearch/issues/108528 mode CLOSING_METRICS_MODE; +CLOSING_METRICS_COLON + : COLON -> type(COLON), popMode, pushMode(METRICS_MODE) + ; + CLOSING_METRICS_COMMA : COMMA -> type(COMMA), popMode, pushMode(METRICS_MODE) ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 04798fc3dca8a..63eb3a86419a3 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -22,7 +22,7 @@ UNKNOWN_CMD=21 LINE_COMMENT=22 MULTILINE_COMMENT=23 WS=24 -INDEX_UNQUOTED_IDENTIFIER=25 +UNQUOTED_SOURCE=25 EXPLAIN_WS=26 EXPLAIN_LINE_COMMENT=27 EXPLAIN_MULTILINE_COMMENT=28 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 69d65ea9a214b..89059822d367b 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -106,11 +106,21 @@ field ; fromCommand - : FROM indexIdentifier (COMMA indexIdentifier)* metadata? + : FROM indexPattern (COMMA indexPattern)* metadata? 
; -indexIdentifier - : INDEX_UNQUOTED_IDENTIFIER +indexPattern + : clusterString COLON indexString + | indexString + ; + +clusterString + : UNQUOTED_SOURCE + ; + +indexString + : UNQUOTED_SOURCE + | QUOTED_STRING ; metadata @@ -119,7 +129,7 @@ metadata ; metadataOption - : METADATA indexIdentifier (COMMA indexIdentifier)* + : METADATA UNQUOTED_SOURCE (COMMA UNQUOTED_SOURCE)* ; deprecated_metadata @@ -127,7 +137,7 @@ deprecated_metadata ; metricsCommand - : METRICS indexIdentifier (COMMA indexIdentifier)* aggregates=fields? (BY grouping=fields)? + : METRICS indexPattern (COMMA indexPattern)* aggregates=fields? (BY grouping=fields)? ; evalCommand @@ -280,5 +290,5 @@ enrichWithClause ; lookupCommand - : LOOKUP tableName=INDEX_UNQUOTED_IDENTIFIER ON matchFields=qualifiedNamePatterns + : LOOKUP tableName=indexPattern ON matchFields=qualifiedNamePatterns ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 04798fc3dca8a..63eb3a86419a3 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -22,7 +22,7 @@ UNKNOWN_CMD=21 LINE_COMMENT=22 MULTILINE_COMMENT=23 WS=24 -INDEX_UNQUOTED_IDENTIFIER=25 +UNQUOTED_SOURCE=25 EXPLAIN_WS=26 EXPLAIN_LINE_COMMENT=27 EXPLAIN_MULTILINE_COMMENT=28 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 12b54126fbabb..43b1ae8d66906 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -96,7 +96,12 @@ public enum Cap { * Fix to GROK and DISSECT that allows extracting attributes with the same name as the input * https://github.com/elastic/elasticsearch/issues/110184 */ - GROK_DISSECT_MASKING; + GROK_DISSECT_MASKING, + + /** + * Support for quoting index sources in double quotes. 
+ */ + DOUBLE_QUOTES_SOURCE_ENCLOSING; Cap() { snapshotOnly = false; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AbstractBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AbstractBuilder.java index 0ec1d0b742726..ce8c743106411 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AbstractBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/AbstractBuilder.java @@ -20,28 +20,31 @@ public Object visit(ParseTree tree) { } @Override - public Object visitTerminal(TerminalNode node) { + public Source visitTerminal(TerminalNode node) { return ParserUtils.source(node); } - static String unquoteString(Source source) { - String text = source.text(); - if (text == null) { + static String unquote(Source source) { + return unquote(source.text()); + } + + static String unquote(String string) { + if (string == null) { return null; } // unescaped strings can be interpreted directly - if (text.startsWith("\"\"\"")) { - return text.substring(3, text.length() - 3); + if (string.startsWith("\"\"\"")) { + return string.substring(3, string.length() - 3); } - text = text.substring(1, text.length() - 1); + string = string.substring(1, string.length() - 1); StringBuilder sb = new StringBuilder(); - for (int i = 0; i < text.length();) { - if (text.charAt(i) == '\\') { + for (int i = 0; i < string.length();) { + if (string.charAt(i) == '\\') { // ANTLR4 Grammar guarantees there is always a character after the `\` - switch (text.charAt(++i)) { + switch (string.charAt(++i)) { case 't' -> sb.append('\t'); case 'n' -> sb.append('\n'); case 'r' -> sb.append('\r'); @@ -51,11 +54,11 @@ static String unquoteString(Source source) { // will be interpreted as regex, so we have to escape it default -> // unknown escape sequence, pass through as-is, e.g: `...\w...` - sb.append('\\').append(text.charAt(i)); + sb.append('\\').append(string.charAt(i)); } i++; } else { - sb.append(text.charAt(i++)); + sb.append(string.charAt(i++)); } } return sb.toString(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index eb3689d0900d3..f5484f3e7070f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -151,7 +151,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -INDEX_UNQUOTED_IDENTIFIER +UNQUOTED_SOURCE EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -277,8 +277,8 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -INDEX_UNQUOTED_IDENTIFIER_PART -INDEX_UNQUOTED_IDENTIFIER +UNQUOTED_SOURCE_PART +UNQUOTED_SOURCE EXPLAIN_OPENING_BRACKET EXPLAIN_PIPE EXPLAIN_WS @@ -345,11 +345,12 @@ EXPR_WS FROM_PIPE FROM_OPENING_BRACKET FROM_CLOSING_BRACKET +FROM_COLON FROM_COMMA FROM_ASSIGN -FROM_QUOTED_STRING METADATA -FROM_INDEX_UNQUOTED_IDENTIFIER +FROM_UNQUOTED_SOURCE +FROM_QUOTED_SOURCE FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -377,7 +378,6 @@ ON WITH ENRICH_POLICY_NAME_BODY ENRICH_POLICY_NAME -ENRICH_QUOTED_IDENTIFIER ENRICH_MODE_UNQUOTED_VALUE ENRICH_LINE_COMMENT ENRICH_MULTILINE_COMMENT @@ -393,10 +393,12 @@ ENRICH_FIELD_LINE_COMMENT ENRICH_FIELD_MULTILINE_COMMENT ENRICH_FIELD_WS LOOKUP_PIPE +LOOKUP_COLON LOOKUP_COMMA LOOKUP_DOT LOOKUP_ON -LOOKUP_INDEX_UNQUOTED_IDENTIFIER +LOOKUP_UNQUOTED_SOURCE 
+LOOKUP_QUOTED_SOURCE LOOKUP_LINE_COMMENT LOOKUP_MULTILINE_COMMENT LOOKUP_WS @@ -431,10 +433,12 @@ SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS METRICS_PIPE -METRICS_INDEX_UNQUOTED_IDENTIFIER +METRICS_UNQUOTED_SOURCE +METRICS_QUOTED_SOURCE METRICS_LINE_COMMENT METRICS_MULTILINE_COMMENT METRICS_WS +CLOSING_METRICS_COLON CLOSING_METRICS_COMMA CLOSING_METRICS_LINE_COMMENT CLOSING_METRICS_MULTILINE_COMMENT @@ -467,4 +471,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 124, 1422, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 
2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 567, 8, 20, 11, 20, 12, 20, 568, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 577, 8, 21, 10, 21, 12, 21, 580, 9, 21, 1, 21, 3, 21, 583, 8, 21, 1, 21, 3, 21, 586, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 595, 8, 22, 10, 22, 12, 22, 598, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 4, 23, 606, 8, 23, 11, 23, 12, 23, 607, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 3, 24, 615, 8, 24, 1, 25, 4, 25, 618, 8, 25, 11, 25, 12, 25, 619, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 3, 36, 659, 8, 36, 1, 36, 4, 36, 662, 8, 36, 11, 36, 12, 36, 663, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 673, 8, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 3, 41, 680, 8, 41, 1, 42, 1, 42, 1, 42, 5, 42, 685, 8, 42, 10, 42, 12, 42, 688, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 696, 8, 42, 10, 42, 12, 42, 699, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 706, 8, 42, 1, 42, 3, 42, 709, 8, 42, 3, 42, 711, 8, 42, 1, 43, 4, 43, 714, 8, 43, 11, 43, 12, 43, 715, 1, 44, 4, 44, 719, 8, 44, 11, 44, 12, 44, 720, 1, 44, 1, 44, 5, 44, 725, 8, 44, 10, 44, 12, 44, 728, 9, 44, 1, 44, 1, 44, 4, 44, 732, 8, 44, 11, 44, 12, 44, 733, 1, 44, 4, 44, 737, 8, 44, 11, 44, 12, 44, 738, 1, 44, 1, 44, 5, 44, 743, 8, 44, 10, 44, 12, 44, 746, 9, 44, 3, 44, 748, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 4, 44, 754, 8, 44, 11, 44, 12, 44, 755, 1, 44, 1, 44, 3, 44, 760, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 5, 80, 882, 8, 80, 10, 80, 12, 80, 885, 9, 80, 1, 80, 1, 80, 4, 80, 889, 8, 80, 11, 80, 12, 80, 890, 3, 80, 893, 8, 80, 1, 81, 1, 81, 
1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 5, 83, 907, 8, 83, 10, 83, 12, 83, 910, 9, 83, 1, 83, 1, 83, 3, 83, 914, 8, 83, 1, 83, 4, 83, 917, 8, 83, 11, 83, 12, 83, 918, 3, 83, 921, 8, 83, 1, 84, 1, 84, 4, 84, 925, 8, 84, 11, 84, 12, 84, 926, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 3, 103, 1012, 8, 103, 1, 104, 1, 104, 3, 104, 1016, 8, 104, 1, 104, 5, 104, 1019, 8, 104, 10, 104, 12, 104, 1022, 9, 104, 1, 104, 1, 104, 3, 104, 1026, 8, 104, 1, 104, 4, 104, 1029, 8, 104, 11, 104, 12, 104, 1030, 3, 104, 1033, 8, 104, 1, 105, 1, 105, 4, 105, 1037, 8, 105, 11, 105, 12, 105, 1038, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 123, 4, 123, 1114, 8, 123, 11, 123, 12, 123, 1115, 1, 123, 1, 123, 3, 123, 1120, 8, 123, 1, 123, 4, 123, 1123, 8, 123, 11, 123, 12, 123, 1124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 
171, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 4, 173, 1343, 8, 173, 11, 173, 12, 173, 1344, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 2, 596, 697, 0, 190, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 0, 66, 25, 68, 0, 70, 0, 72, 26, 74, 27, 76, 28, 78, 29, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 0, 98, 0, 100, 30, 102, 31, 104, 32, 106, 33, 108, 34, 110, 35, 112, 36, 114, 37, 116, 38, 118, 39, 120, 40, 122, 41, 124, 42, 126, 43, 128, 44, 130, 45, 132, 46, 134, 47, 136, 48, 138, 49, 140, 50, 142, 51, 144, 52, 146, 53, 148, 54, 150, 55, 152, 56, 154, 57, 156, 58, 158, 59, 160, 60, 162, 61, 164, 62, 166, 63, 168, 64, 170, 65, 172, 66, 174, 67, 176, 68, 178, 69, 180, 70, 182, 71, 184, 0, 186, 72, 188, 73, 190, 74, 192, 75, 194, 0, 196, 0, 198, 0, 200, 0, 202, 0, 204, 0, 206, 76, 208, 0, 210, 77, 212, 78, 214, 79, 216, 0, 218, 0, 220, 0, 222, 0, 224, 0, 226, 80, 228, 81, 230, 82, 232, 83, 234, 0, 236, 0, 238, 0, 240, 0, 242, 84, 244, 0, 246, 85, 248, 86, 250, 87, 252, 0, 254, 0, 256, 88, 258, 89, 260, 0, 262, 90, 264, 0, 266, 0, 268, 91, 270, 92, 272, 93, 274, 0, 276, 0, 278, 0, 280, 0, 282, 0, 284, 0, 286, 0, 288, 94, 290, 95, 292, 96, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 97, 306, 98, 308, 99, 310, 0, 312, 0, 314, 0, 316, 0, 318, 100, 320, 101, 322, 102, 324, 0, 326, 0, 328, 0, 330, 0, 332, 103, 334, 104, 336, 105, 338, 0, 340, 106, 342, 107, 344, 108, 346, 109, 348, 0, 350, 110, 352, 111, 354, 112, 356, 113, 358, 0, 360, 114, 362, 115, 364, 116, 366, 117, 368, 118, 370, 0, 372, 0, 374, 119, 376, 120, 378, 121, 380, 0, 382, 122, 384, 123, 386, 124, 388, 0, 390, 0, 392, 0, 394, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1448, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 1, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 74, 1, 0, 0, 0, 1, 76, 1, 0, 0, 0, 2, 78, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 
104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 2, 180, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 2, 188, 1, 0, 0, 0, 2, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 226, 1, 0, 0, 0, 4, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 5, 248, 1, 0, 0, 0, 5, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 6, 270, 1, 0, 0, 0, 6, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 7, 290, 1, 0, 0, 0, 7, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 8, 304, 1, 0, 0, 0, 8, 306, 1, 0, 0, 0, 8, 308, 1, 0, 0, 0, 9, 310, 1, 0, 0, 0, 9, 312, 1, 0, 0, 0, 9, 314, 1, 0, 0, 0, 9, 316, 1, 0, 0, 0, 9, 318, 1, 0, 0, 0, 9, 320, 1, 0, 0, 0, 9, 322, 1, 0, 0, 0, 10, 324, 1, 0, 0, 0, 10, 326, 1, 0, 0, 0, 10, 328, 1, 0, 0, 0, 10, 330, 1, 0, 0, 0, 10, 332, 1, 0, 0, 0, 10, 334, 1, 0, 0, 0, 10, 336, 1, 0, 0, 0, 11, 338, 1, 0, 0, 0, 11, 340, 1, 0, 0, 0, 11, 342, 1, 0, 0, 0, 11, 344, 1, 0, 0, 0, 11, 346, 1, 0, 0, 0, 12, 348, 1, 0, 0, 0, 12, 350, 1, 0, 0, 0, 12, 352, 1, 0, 0, 0, 12, 354, 1, 0, 0, 0, 12, 356, 1, 0, 0, 0, 13, 358, 1, 0, 0, 0, 13, 360, 1, 0, 0, 0, 13, 362, 1, 0, 0, 0, 13, 364, 1, 0, 0, 0, 13, 366, 1, 0, 0, 0, 13, 368, 1, 0, 0, 0, 14, 370, 1, 0, 0, 0, 14, 372, 1, 0, 0, 0, 14, 374, 1, 0, 0, 0, 14, 376, 1, 0, 0, 0, 14, 378, 1, 0, 0, 0, 15, 380, 1, 0, 0, 0, 15, 382, 1, 0, 0, 0, 15, 384, 1, 0, 0, 0, 15, 386, 1, 0, 0, 0, 15, 388, 1, 0, 0, 0, 15, 390, 1, 0, 0, 0, 15, 392, 1, 0, 0, 0, 15, 394, 1, 0, 0, 0, 16, 396, 1, 0, 0, 0, 18, 406, 1, 0, 0, 0, 20, 413, 1, 0, 0, 0, 22, 422, 1, 0, 0, 0, 24, 429, 1, 0, 0, 0, 26, 439, 1, 0, 0, 0, 28, 446, 1, 0, 0, 0, 30, 453, 1, 0, 0, 0, 32, 467, 1, 0, 0, 0, 34, 474, 1, 0, 0, 0, 36, 482, 1, 0, 0, 0, 38, 491, 1, 0, 0, 0, 40, 498, 1, 0, 0, 0, 42, 508, 1, 0, 0, 0, 44, 520, 1, 0, 0, 0, 46, 529, 1, 0, 0, 0, 48, 535, 1, 0, 0, 0, 50, 542, 1, 0, 0, 0, 52, 549, 1, 0, 0, 0, 54, 557, 1, 0, 0, 0, 56, 566, 1, 0, 0, 0, 58, 572, 1, 0, 0, 0, 60, 589, 1, 0, 0, 0, 62, 605, 1, 0, 0, 0, 64, 614, 1, 0, 0, 0, 66, 617, 1, 0, 0, 0, 68, 621, 1, 0, 0, 0, 70, 626, 1, 0, 0, 0, 72, 631, 1, 0, 0, 0, 74, 635, 1, 0, 0, 0, 76, 639, 1, 0, 0, 0, 78, 643, 1, 0, 0, 0, 80, 647, 
1, 0, 0, 0, 82, 649, 1, 0, 0, 0, 84, 651, 1, 0, 0, 0, 86, 654, 1, 0, 0, 0, 88, 656, 1, 0, 0, 0, 90, 665, 1, 0, 0, 0, 92, 667, 1, 0, 0, 0, 94, 672, 1, 0, 0, 0, 96, 674, 1, 0, 0, 0, 98, 679, 1, 0, 0, 0, 100, 710, 1, 0, 0, 0, 102, 713, 1, 0, 0, 0, 104, 759, 1, 0, 0, 0, 106, 761, 1, 0, 0, 0, 108, 764, 1, 0, 0, 0, 110, 768, 1, 0, 0, 0, 112, 772, 1, 0, 0, 0, 114, 774, 1, 0, 0, 0, 116, 777, 1, 0, 0, 0, 118, 779, 1, 0, 0, 0, 120, 784, 1, 0, 0, 0, 122, 786, 1, 0, 0, 0, 124, 792, 1, 0, 0, 0, 126, 798, 1, 0, 0, 0, 128, 803, 1, 0, 0, 0, 130, 805, 1, 0, 0, 0, 132, 808, 1, 0, 0, 0, 134, 811, 1, 0, 0, 0, 136, 816, 1, 0, 0, 0, 138, 820, 1, 0, 0, 0, 140, 825, 1, 0, 0, 0, 142, 831, 1, 0, 0, 0, 144, 834, 1, 0, 0, 0, 146, 836, 1, 0, 0, 0, 148, 842, 1, 0, 0, 0, 150, 844, 1, 0, 0, 0, 152, 849, 1, 0, 0, 0, 154, 852, 1, 0, 0, 0, 156, 855, 1, 0, 0, 0, 158, 858, 1, 0, 0, 0, 160, 860, 1, 0, 0, 0, 162, 863, 1, 0, 0, 0, 164, 865, 1, 0, 0, 0, 166, 868, 1, 0, 0, 0, 168, 870, 1, 0, 0, 0, 170, 872, 1, 0, 0, 0, 172, 874, 1, 0, 0, 0, 174, 876, 1, 0, 0, 0, 176, 892, 1, 0, 0, 0, 178, 894, 1, 0, 0, 0, 180, 899, 1, 0, 0, 0, 182, 920, 1, 0, 0, 0, 184, 922, 1, 0, 0, 0, 186, 930, 1, 0, 0, 0, 188, 932, 1, 0, 0, 0, 190, 936, 1, 0, 0, 0, 192, 940, 1, 0, 0, 0, 194, 944, 1, 0, 0, 0, 196, 949, 1, 0, 0, 0, 198, 953, 1, 0, 0, 0, 200, 957, 1, 0, 0, 0, 202, 961, 1, 0, 0, 0, 204, 965, 1, 0, 0, 0, 206, 969, 1, 0, 0, 0, 208, 978, 1, 0, 0, 0, 210, 982, 1, 0, 0, 0, 212, 986, 1, 0, 0, 0, 214, 990, 1, 0, 0, 0, 216, 994, 1, 0, 0, 0, 218, 999, 1, 0, 0, 0, 220, 1003, 1, 0, 0, 0, 222, 1011, 1, 0, 0, 0, 224, 1032, 1, 0, 0, 0, 226, 1036, 1, 0, 0, 0, 228, 1040, 1, 0, 0, 0, 230, 1044, 1, 0, 0, 0, 232, 1048, 1, 0, 0, 0, 234, 1052, 1, 0, 0, 0, 236, 1057, 1, 0, 0, 0, 238, 1061, 1, 0, 0, 0, 240, 1065, 1, 0, 0, 0, 242, 1069, 1, 0, 0, 0, 244, 1072, 1, 0, 0, 0, 246, 1076, 1, 0, 0, 0, 248, 1080, 1, 0, 0, 0, 250, 1084, 1, 0, 0, 0, 252, 1088, 1, 0, 0, 0, 254, 1093, 1, 0, 0, 0, 256, 1098, 1, 0, 0, 0, 258, 1103, 1, 0, 0, 0, 260, 1110, 1, 0, 0, 0, 262, 1119, 1, 0, 0, 0, 264, 1126, 1, 0, 0, 0, 266, 1130, 1, 0, 0, 0, 268, 1134, 1, 0, 0, 0, 270, 1138, 1, 0, 0, 0, 272, 1142, 1, 0, 0, 0, 274, 1146, 1, 0, 0, 0, 276, 1152, 1, 0, 0, 0, 278, 1156, 1, 0, 0, 0, 280, 1160, 1, 0, 0, 0, 282, 1164, 1, 0, 0, 0, 284, 1168, 1, 0, 0, 0, 286, 1172, 1, 0, 0, 0, 288, 1176, 1, 0, 0, 0, 290, 1180, 1, 0, 0, 0, 292, 1184, 1, 0, 0, 0, 294, 1188, 1, 0, 0, 0, 296, 1193, 1, 0, 0, 0, 298, 1197, 1, 0, 0, 0, 300, 1201, 1, 0, 0, 0, 302, 1206, 1, 0, 0, 0, 304, 1210, 1, 0, 0, 0, 306, 1214, 1, 0, 0, 0, 308, 1218, 1, 0, 0, 0, 310, 1222, 1, 0, 0, 0, 312, 1228, 1, 0, 0, 0, 314, 1232, 1, 0, 0, 0, 316, 1236, 1, 0, 0, 0, 318, 1240, 1, 0, 0, 0, 320, 1244, 1, 0, 0, 0, 322, 1248, 1, 0, 0, 0, 324, 1252, 1, 0, 0, 0, 326, 1257, 1, 0, 0, 0, 328, 1261, 1, 0, 0, 0, 330, 1265, 1, 0, 0, 0, 332, 1269, 1, 0, 0, 0, 334, 1273, 1, 0, 0, 0, 336, 1277, 1, 0, 0, 0, 338, 1281, 1, 0, 0, 0, 340, 1286, 1, 0, 0, 0, 342, 1291, 1, 0, 0, 0, 344, 1295, 1, 0, 0, 0, 346, 1299, 1, 0, 0, 0, 348, 1303, 1, 0, 0, 0, 350, 1308, 1, 0, 0, 0, 352, 1318, 1, 0, 0, 0, 354, 1322, 1, 0, 0, 0, 356, 1326, 1, 0, 0, 0, 358, 1330, 1, 0, 0, 0, 360, 1335, 1, 0, 0, 0, 362, 1342, 1, 0, 0, 0, 364, 1346, 1, 0, 0, 0, 366, 1350, 1, 0, 0, 0, 368, 1354, 1, 0, 0, 0, 370, 1358, 1, 0, 0, 0, 372, 1363, 1, 0, 0, 0, 374, 1369, 1, 0, 0, 0, 376, 1373, 1, 0, 0, 0, 378, 1377, 1, 0, 0, 0, 380, 1381, 1, 0, 0, 0, 382, 1387, 1, 0, 0, 0, 384, 1391, 1, 0, 0, 0, 386, 1395, 1, 0, 0, 0, 388, 1399, 1, 0, 0, 0, 390, 1405, 1, 0, 0, 0, 392, 1411, 1, 0, 0, 0, 394, 1417, 1, 0, 0, 0, 396, 397, 
5, 100, 0, 0, 397, 398, 5, 105, 0, 0, 398, 399, 5, 115, 0, 0, 399, 400, 5, 115, 0, 0, 400, 401, 5, 101, 0, 0, 401, 402, 5, 99, 0, 0, 402, 403, 5, 116, 0, 0, 403, 404, 1, 0, 0, 0, 404, 405, 6, 0, 0, 0, 405, 17, 1, 0, 0, 0, 406, 407, 5, 100, 0, 0, 407, 408, 5, 114, 0, 0, 408, 409, 5, 111, 0, 0, 409, 410, 5, 112, 0, 0, 410, 411, 1, 0, 0, 0, 411, 412, 6, 1, 1, 0, 412, 19, 1, 0, 0, 0, 413, 414, 5, 101, 0, 0, 414, 415, 5, 110, 0, 0, 415, 416, 5, 114, 0, 0, 416, 417, 5, 105, 0, 0, 417, 418, 5, 99, 0, 0, 418, 419, 5, 104, 0, 0, 419, 420, 1, 0, 0, 0, 420, 421, 6, 2, 2, 0, 421, 21, 1, 0, 0, 0, 422, 423, 5, 101, 0, 0, 423, 424, 5, 118, 0, 0, 424, 425, 5, 97, 0, 0, 425, 426, 5, 108, 0, 0, 426, 427, 1, 0, 0, 0, 427, 428, 6, 3, 0, 0, 428, 23, 1, 0, 0, 0, 429, 430, 5, 101, 0, 0, 430, 431, 5, 120, 0, 0, 431, 432, 5, 112, 0, 0, 432, 433, 5, 108, 0, 0, 433, 434, 5, 97, 0, 0, 434, 435, 5, 105, 0, 0, 435, 436, 5, 110, 0, 0, 436, 437, 1, 0, 0, 0, 437, 438, 6, 4, 3, 0, 438, 25, 1, 0, 0, 0, 439, 440, 5, 102, 0, 0, 440, 441, 5, 114, 0, 0, 441, 442, 5, 111, 0, 0, 442, 443, 5, 109, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 5, 4, 0, 445, 27, 1, 0, 0, 0, 446, 447, 5, 103, 0, 0, 447, 448, 5, 114, 0, 0, 448, 449, 5, 111, 0, 0, 449, 450, 5, 107, 0, 0, 450, 451, 1, 0, 0, 0, 451, 452, 6, 6, 0, 0, 452, 29, 1, 0, 0, 0, 453, 454, 5, 105, 0, 0, 454, 455, 5, 110, 0, 0, 455, 456, 5, 108, 0, 0, 456, 457, 5, 105, 0, 0, 457, 458, 5, 110, 0, 0, 458, 459, 5, 101, 0, 0, 459, 460, 5, 115, 0, 0, 460, 461, 5, 116, 0, 0, 461, 462, 5, 97, 0, 0, 462, 463, 5, 116, 0, 0, 463, 464, 5, 115, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 6, 7, 0, 0, 466, 31, 1, 0, 0, 0, 467, 468, 5, 107, 0, 0, 468, 469, 5, 101, 0, 0, 469, 470, 5, 101, 0, 0, 470, 471, 5, 112, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 6, 8, 1, 0, 473, 33, 1, 0, 0, 0, 474, 475, 5, 108, 0, 0, 475, 476, 5, 105, 0, 0, 476, 477, 5, 109, 0, 0, 477, 478, 5, 105, 0, 0, 478, 479, 5, 116, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 9, 0, 0, 481, 35, 1, 0, 0, 0, 482, 483, 5, 108, 0, 0, 483, 484, 5, 111, 0, 0, 484, 485, 5, 111, 0, 0, 485, 486, 5, 107, 0, 0, 486, 487, 5, 117, 0, 0, 487, 488, 5, 112, 0, 0, 488, 489, 1, 0, 0, 0, 489, 490, 6, 10, 5, 0, 490, 37, 1, 0, 0, 0, 491, 492, 5, 109, 0, 0, 492, 493, 5, 101, 0, 0, 493, 494, 5, 116, 0, 0, 494, 495, 5, 97, 0, 0, 495, 496, 1, 0, 0, 0, 496, 497, 6, 11, 6, 0, 497, 39, 1, 0, 0, 0, 498, 499, 5, 109, 0, 0, 499, 500, 5, 101, 0, 0, 500, 501, 5, 116, 0, 0, 501, 502, 5, 114, 0, 0, 502, 503, 5, 105, 0, 0, 503, 504, 5, 99, 0, 0, 504, 505, 5, 115, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 12, 7, 0, 507, 41, 1, 0, 0, 0, 508, 509, 5, 109, 0, 0, 509, 510, 5, 118, 0, 0, 510, 511, 5, 95, 0, 0, 511, 512, 5, 101, 0, 0, 512, 513, 5, 120, 0, 0, 513, 514, 5, 112, 0, 0, 514, 515, 5, 97, 0, 0, 515, 516, 5, 110, 0, 0, 516, 517, 5, 100, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 6, 13, 8, 0, 519, 43, 1, 0, 0, 0, 520, 521, 5, 114, 0, 0, 521, 522, 5, 101, 0, 0, 522, 523, 5, 110, 0, 0, 523, 524, 5, 97, 0, 0, 524, 525, 5, 109, 0, 0, 525, 526, 5, 101, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 6, 14, 9, 0, 528, 45, 1, 0, 0, 0, 529, 530, 5, 114, 0, 0, 530, 531, 5, 111, 0, 0, 531, 532, 5, 119, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 15, 0, 0, 534, 47, 1, 0, 0, 0, 535, 536, 5, 115, 0, 0, 536, 537, 5, 104, 0, 0, 537, 538, 5, 111, 0, 0, 538, 539, 5, 119, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 16, 10, 0, 541, 49, 1, 0, 0, 0, 542, 543, 5, 115, 0, 0, 543, 544, 5, 111, 0, 0, 544, 545, 5, 114, 0, 0, 545, 546, 5, 116, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 6, 17, 0, 0, 548, 51, 1, 0, 0, 0, 549, 550, 5, 115, 
0, 0, 550, 551, 5, 116, 0, 0, 551, 552, 5, 97, 0, 0, 552, 553, 5, 116, 0, 0, 553, 554, 5, 115, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 18, 0, 0, 556, 53, 1, 0, 0, 0, 557, 558, 5, 119, 0, 0, 558, 559, 5, 104, 0, 0, 559, 560, 5, 101, 0, 0, 560, 561, 5, 114, 0, 0, 561, 562, 5, 101, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 19, 0, 0, 564, 55, 1, 0, 0, 0, 565, 567, 8, 0, 0, 0, 566, 565, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 6, 20, 0, 0, 571, 57, 1, 0, 0, 0, 572, 573, 5, 47, 0, 0, 573, 574, 5, 47, 0, 0, 574, 578, 1, 0, 0, 0, 575, 577, 8, 1, 0, 0, 576, 575, 1, 0, 0, 0, 577, 580, 1, 0, 0, 0, 578, 576, 1, 0, 0, 0, 578, 579, 1, 0, 0, 0, 579, 582, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 581, 583, 5, 13, 0, 0, 582, 581, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 585, 1, 0, 0, 0, 584, 586, 5, 10, 0, 0, 585, 584, 1, 0, 0, 0, 585, 586, 1, 0, 0, 0, 586, 587, 1, 0, 0, 0, 587, 588, 6, 21, 11, 0, 588, 59, 1, 0, 0, 0, 589, 590, 5, 47, 0, 0, 590, 591, 5, 42, 0, 0, 591, 596, 1, 0, 0, 0, 592, 595, 3, 60, 22, 0, 593, 595, 9, 0, 0, 0, 594, 592, 1, 0, 0, 0, 594, 593, 1, 0, 0, 0, 595, 598, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 596, 594, 1, 0, 0, 0, 597, 599, 1, 0, 0, 0, 598, 596, 1, 0, 0, 0, 599, 600, 5, 42, 0, 0, 600, 601, 5, 47, 0, 0, 601, 602, 1, 0, 0, 0, 602, 603, 6, 22, 11, 0, 603, 61, 1, 0, 0, 0, 604, 606, 7, 2, 0, 0, 605, 604, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 609, 1, 0, 0, 0, 609, 610, 6, 23, 11, 0, 610, 63, 1, 0, 0, 0, 611, 615, 8, 3, 0, 0, 612, 613, 5, 47, 0, 0, 613, 615, 8, 4, 0, 0, 614, 611, 1, 0, 0, 0, 614, 612, 1, 0, 0, 0, 615, 65, 1, 0, 0, 0, 616, 618, 3, 64, 24, 0, 617, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 67, 1, 0, 0, 0, 621, 622, 3, 178, 81, 0, 622, 623, 1, 0, 0, 0, 623, 624, 6, 26, 12, 0, 624, 625, 6, 26, 13, 0, 625, 69, 1, 0, 0, 0, 626, 627, 3, 78, 31, 0, 627, 628, 1, 0, 0, 0, 628, 629, 6, 27, 14, 0, 629, 630, 6, 27, 15, 0, 630, 71, 1, 0, 0, 0, 631, 632, 3, 62, 23, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 28, 11, 0, 634, 73, 1, 0, 0, 0, 635, 636, 3, 58, 21, 0, 636, 637, 1, 0, 0, 0, 637, 638, 6, 29, 11, 0, 638, 75, 1, 0, 0, 0, 639, 640, 3, 60, 22, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 30, 11, 0, 642, 77, 1, 0, 0, 0, 643, 644, 5, 124, 0, 0, 644, 645, 1, 0, 0, 0, 645, 646, 6, 31, 15, 0, 646, 79, 1, 0, 0, 0, 647, 648, 7, 5, 0, 0, 648, 81, 1, 0, 0, 0, 649, 650, 7, 6, 0, 0, 650, 83, 1, 0, 0, 0, 651, 652, 5, 92, 0, 0, 652, 653, 7, 7, 0, 0, 653, 85, 1, 0, 0, 0, 654, 655, 8, 8, 0, 0, 655, 87, 1, 0, 0, 0, 656, 658, 7, 9, 0, 0, 657, 659, 7, 10, 0, 0, 658, 657, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 661, 1, 0, 0, 0, 660, 662, 3, 80, 32, 0, 661, 660, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 661, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 89, 1, 0, 0, 0, 665, 666, 5, 64, 0, 0, 666, 91, 1, 0, 0, 0, 667, 668, 5, 96, 0, 0, 668, 93, 1, 0, 0, 0, 669, 673, 8, 11, 0, 0, 670, 671, 5, 96, 0, 0, 671, 673, 5, 96, 0, 0, 672, 669, 1, 0, 0, 0, 672, 670, 1, 0, 0, 0, 673, 95, 1, 0, 0, 0, 674, 675, 5, 95, 0, 0, 675, 97, 1, 0, 0, 0, 676, 680, 3, 82, 33, 0, 677, 680, 3, 80, 32, 0, 678, 680, 3, 96, 40, 0, 679, 676, 1, 0, 0, 0, 679, 677, 1, 0, 0, 0, 679, 678, 1, 0, 0, 0, 680, 99, 1, 0, 0, 0, 681, 686, 5, 34, 0, 0, 682, 685, 3, 84, 34, 0, 683, 685, 3, 86, 35, 0, 684, 682, 1, 0, 0, 0, 684, 683, 1, 0, 0, 0, 685, 688, 1, 0, 0, 0, 686, 684, 1, 0, 0, 0, 686, 687, 1, 0, 0, 0, 687, 689, 1, 0, 0, 0, 688, 686, 1, 0, 0, 0, 689, 711, 5, 34, 0, 0, 690, 691, 5, 34, 0, 0, 691, 692, 5, 34, 0, 0, 692, 
693, 5, 34, 0, 0, 693, 697, 1, 0, 0, 0, 694, 696, 8, 1, 0, 0, 695, 694, 1, 0, 0, 0, 696, 699, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 697, 695, 1, 0, 0, 0, 698, 700, 1, 0, 0, 0, 699, 697, 1, 0, 0, 0, 700, 701, 5, 34, 0, 0, 701, 702, 5, 34, 0, 0, 702, 703, 5, 34, 0, 0, 703, 705, 1, 0, 0, 0, 704, 706, 5, 34, 0, 0, 705, 704, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 708, 1, 0, 0, 0, 707, 709, 5, 34, 0, 0, 708, 707, 1, 0, 0, 0, 708, 709, 1, 0, 0, 0, 709, 711, 1, 0, 0, 0, 710, 681, 1, 0, 0, 0, 710, 690, 1, 0, 0, 0, 711, 101, 1, 0, 0, 0, 712, 714, 3, 80, 32, 0, 713, 712, 1, 0, 0, 0, 714, 715, 1, 0, 0, 0, 715, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 103, 1, 0, 0, 0, 717, 719, 3, 80, 32, 0, 718, 717, 1, 0, 0, 0, 719, 720, 1, 0, 0, 0, 720, 718, 1, 0, 0, 0, 720, 721, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 726, 3, 120, 52, 0, 723, 725, 3, 80, 32, 0, 724, 723, 1, 0, 0, 0, 725, 728, 1, 0, 0, 0, 726, 724, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 760, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 729, 731, 3, 120, 52, 0, 730, 732, 3, 80, 32, 0, 731, 730, 1, 0, 0, 0, 732, 733, 1, 0, 0, 0, 733, 731, 1, 0, 0, 0, 733, 734, 1, 0, 0, 0, 734, 760, 1, 0, 0, 0, 735, 737, 3, 80, 32, 0, 736, 735, 1, 0, 0, 0, 737, 738, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 739, 1, 0, 0, 0, 739, 747, 1, 0, 0, 0, 740, 744, 3, 120, 52, 0, 741, 743, 3, 80, 32, 0, 742, 741, 1, 0, 0, 0, 743, 746, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 745, 1, 0, 0, 0, 745, 748, 1, 0, 0, 0, 746, 744, 1, 0, 0, 0, 747, 740, 1, 0, 0, 0, 747, 748, 1, 0, 0, 0, 748, 749, 1, 0, 0, 0, 749, 750, 3, 88, 36, 0, 750, 760, 1, 0, 0, 0, 751, 753, 3, 120, 52, 0, 752, 754, 3, 80, 32, 0, 753, 752, 1, 0, 0, 0, 754, 755, 1, 0, 0, 0, 755, 753, 1, 0, 0, 0, 755, 756, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 757, 758, 3, 88, 36, 0, 758, 760, 1, 0, 0, 0, 759, 718, 1, 0, 0, 0, 759, 729, 1, 0, 0, 0, 759, 736, 1, 0, 0, 0, 759, 751, 1, 0, 0, 0, 760, 105, 1, 0, 0, 0, 761, 762, 5, 98, 0, 0, 762, 763, 5, 121, 0, 0, 763, 107, 1, 0, 0, 0, 764, 765, 5, 97, 0, 0, 765, 766, 5, 110, 0, 0, 766, 767, 5, 100, 0, 0, 767, 109, 1, 0, 0, 0, 768, 769, 5, 97, 0, 0, 769, 770, 5, 115, 0, 0, 770, 771, 5, 99, 0, 0, 771, 111, 1, 0, 0, 0, 772, 773, 5, 61, 0, 0, 773, 113, 1, 0, 0, 0, 774, 775, 5, 58, 0, 0, 775, 776, 5, 58, 0, 0, 776, 115, 1, 0, 0, 0, 777, 778, 5, 44, 0, 0, 778, 117, 1, 0, 0, 0, 779, 780, 5, 100, 0, 0, 780, 781, 5, 101, 0, 0, 781, 782, 5, 115, 0, 0, 782, 783, 5, 99, 0, 0, 783, 119, 1, 0, 0, 0, 784, 785, 5, 46, 0, 0, 785, 121, 1, 0, 0, 0, 786, 787, 5, 102, 0, 0, 787, 788, 5, 97, 0, 0, 788, 789, 5, 108, 0, 0, 789, 790, 5, 115, 0, 0, 790, 791, 5, 101, 0, 0, 791, 123, 1, 0, 0, 0, 792, 793, 5, 102, 0, 0, 793, 794, 5, 105, 0, 0, 794, 795, 5, 114, 0, 0, 795, 796, 5, 115, 0, 0, 796, 797, 5, 116, 0, 0, 797, 125, 1, 0, 0, 0, 798, 799, 5, 108, 0, 0, 799, 800, 5, 97, 0, 0, 800, 801, 5, 115, 0, 0, 801, 802, 5, 116, 0, 0, 802, 127, 1, 0, 0, 0, 803, 804, 5, 40, 0, 0, 804, 129, 1, 0, 0, 0, 805, 806, 5, 105, 0, 0, 806, 807, 5, 110, 0, 0, 807, 131, 1, 0, 0, 0, 808, 809, 5, 105, 0, 0, 809, 810, 5, 115, 0, 0, 810, 133, 1, 0, 0, 0, 811, 812, 5, 108, 0, 0, 812, 813, 5, 105, 0, 0, 813, 814, 5, 107, 0, 0, 814, 815, 5, 101, 0, 0, 815, 135, 1, 0, 0, 0, 816, 817, 5, 110, 0, 0, 817, 818, 5, 111, 0, 0, 818, 819, 5, 116, 0, 0, 819, 137, 1, 0, 0, 0, 820, 821, 5, 110, 0, 0, 821, 822, 5, 117, 0, 0, 822, 823, 5, 108, 0, 0, 823, 824, 5, 108, 0, 0, 824, 139, 1, 0, 0, 0, 825, 826, 5, 110, 0, 0, 826, 827, 5, 117, 0, 0, 827, 828, 5, 108, 0, 0, 828, 829, 5, 108, 0, 0, 829, 830, 5, 115, 0, 0, 830, 141, 1, 0, 0, 0, 831, 832, 5, 111, 0, 0, 832, 833, 5, 114, 0, 
0, 833, 143, 1, 0, 0, 0, 834, 835, 5, 63, 0, 0, 835, 145, 1, 0, 0, 0, 836, 837, 5, 114, 0, 0, 837, 838, 5, 108, 0, 0, 838, 839, 5, 105, 0, 0, 839, 840, 5, 107, 0, 0, 840, 841, 5, 101, 0, 0, 841, 147, 1, 0, 0, 0, 842, 843, 5, 41, 0, 0, 843, 149, 1, 0, 0, 0, 844, 845, 5, 116, 0, 0, 845, 846, 5, 114, 0, 0, 846, 847, 5, 117, 0, 0, 847, 848, 5, 101, 0, 0, 848, 151, 1, 0, 0, 0, 849, 850, 5, 61, 0, 0, 850, 851, 5, 61, 0, 0, 851, 153, 1, 0, 0, 0, 852, 853, 5, 61, 0, 0, 853, 854, 5, 126, 0, 0, 854, 155, 1, 0, 0, 0, 855, 856, 5, 33, 0, 0, 856, 857, 5, 61, 0, 0, 857, 157, 1, 0, 0, 0, 858, 859, 5, 60, 0, 0, 859, 159, 1, 0, 0, 0, 860, 861, 5, 60, 0, 0, 861, 862, 5, 61, 0, 0, 862, 161, 1, 0, 0, 0, 863, 864, 5, 62, 0, 0, 864, 163, 1, 0, 0, 0, 865, 866, 5, 62, 0, 0, 866, 867, 5, 61, 0, 0, 867, 165, 1, 0, 0, 0, 868, 869, 5, 43, 0, 0, 869, 167, 1, 0, 0, 0, 870, 871, 5, 45, 0, 0, 871, 169, 1, 0, 0, 0, 872, 873, 5, 42, 0, 0, 873, 171, 1, 0, 0, 0, 874, 875, 5, 47, 0, 0, 875, 173, 1, 0, 0, 0, 876, 877, 5, 37, 0, 0, 877, 175, 1, 0, 0, 0, 878, 879, 3, 144, 64, 0, 879, 883, 3, 82, 33, 0, 880, 882, 3, 98, 41, 0, 881, 880, 1, 0, 0, 0, 882, 885, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 884, 893, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 886, 888, 3, 144, 64, 0, 887, 889, 3, 80, 32, 0, 888, 887, 1, 0, 0, 0, 889, 890, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890, 891, 1, 0, 0, 0, 891, 893, 1, 0, 0, 0, 892, 878, 1, 0, 0, 0, 892, 886, 1, 0, 0, 0, 893, 177, 1, 0, 0, 0, 894, 895, 5, 91, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 81, 0, 0, 897, 898, 6, 81, 0, 0, 898, 179, 1, 0, 0, 0, 899, 900, 5, 93, 0, 0, 900, 901, 1, 0, 0, 0, 901, 902, 6, 82, 15, 0, 902, 903, 6, 82, 15, 0, 903, 181, 1, 0, 0, 0, 904, 908, 3, 82, 33, 0, 905, 907, 3, 98, 41, 0, 906, 905, 1, 0, 0, 0, 907, 910, 1, 0, 0, 0, 908, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 921, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 911, 914, 3, 96, 40, 0, 912, 914, 3, 90, 37, 0, 913, 911, 1, 0, 0, 0, 913, 912, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 917, 3, 98, 41, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 921, 1, 0, 0, 0, 920, 904, 1, 0, 0, 0, 920, 913, 1, 0, 0, 0, 921, 183, 1, 0, 0, 0, 922, 924, 3, 92, 38, 0, 923, 925, 3, 94, 39, 0, 924, 923, 1, 0, 0, 0, 925, 926, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 928, 1, 0, 0, 0, 928, 929, 3, 92, 38, 0, 929, 185, 1, 0, 0, 0, 930, 931, 3, 184, 84, 0, 931, 187, 1, 0, 0, 0, 932, 933, 3, 58, 21, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 86, 11, 0, 935, 189, 1, 0, 0, 0, 936, 937, 3, 60, 22, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 87, 11, 0, 939, 191, 1, 0, 0, 0, 940, 941, 3, 62, 23, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 88, 11, 0, 943, 193, 1, 0, 0, 0, 944, 945, 3, 78, 31, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 89, 14, 0, 947, 948, 6, 89, 15, 0, 948, 195, 1, 0, 0, 0, 949, 950, 3, 178, 81, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 90, 12, 0, 952, 197, 1, 0, 0, 0, 953, 954, 3, 180, 82, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 91, 16, 0, 956, 199, 1, 0, 0, 0, 957, 958, 3, 116, 50, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 92, 17, 0, 960, 201, 1, 0, 0, 0, 961, 962, 3, 112, 48, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 93, 18, 0, 964, 203, 1, 0, 0, 0, 965, 966, 3, 100, 42, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 94, 19, 0, 968, 205, 1, 0, 0, 0, 969, 970, 5, 109, 0, 0, 970, 971, 5, 101, 0, 0, 971, 972, 5, 116, 0, 0, 972, 973, 5, 97, 0, 0, 973, 974, 5, 100, 0, 0, 974, 975, 5, 97, 0, 0, 975, 976, 5, 116, 0, 0, 976, 977, 5, 97, 0, 0, 977, 207, 1, 0, 0, 0, 978, 979, 3, 66, 25, 0, 979, 980, 1, 0, 0, 0, 980, 
981, 6, 96, 20, 0, 981, 209, 1, 0, 0, 0, 982, 983, 3, 58, 21, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 97, 11, 0, 985, 211, 1, 0, 0, 0, 986, 987, 3, 60, 22, 0, 987, 988, 1, 0, 0, 0, 988, 989, 6, 98, 11, 0, 989, 213, 1, 0, 0, 0, 990, 991, 3, 62, 23, 0, 991, 992, 1, 0, 0, 0, 992, 993, 6, 99, 11, 0, 993, 215, 1, 0, 0, 0, 994, 995, 3, 78, 31, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 100, 14, 0, 997, 998, 6, 100, 15, 0, 998, 217, 1, 0, 0, 0, 999, 1000, 3, 120, 52, 0, 1000, 1001, 1, 0, 0, 0, 1001, 1002, 6, 101, 21, 0, 1002, 219, 1, 0, 0, 0, 1003, 1004, 3, 116, 50, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 102, 17, 0, 1006, 221, 1, 0, 0, 0, 1007, 1012, 3, 82, 33, 0, 1008, 1012, 3, 80, 32, 0, 1009, 1012, 3, 96, 40, 0, 1010, 1012, 3, 170, 77, 0, 1011, 1007, 1, 0, 0, 0, 1011, 1008, 1, 0, 0, 0, 1011, 1009, 1, 0, 0, 0, 1011, 1010, 1, 0, 0, 0, 1012, 223, 1, 0, 0, 0, 1013, 1016, 3, 82, 33, 0, 1014, 1016, 3, 170, 77, 0, 1015, 1013, 1, 0, 0, 0, 1015, 1014, 1, 0, 0, 0, 1016, 1020, 1, 0, 0, 0, 1017, 1019, 3, 222, 103, 0, 1018, 1017, 1, 0, 0, 0, 1019, 1022, 1, 0, 0, 0, 1020, 1018, 1, 0, 0, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1033, 1, 0, 0, 0, 1022, 1020, 1, 0, 0, 0, 1023, 1026, 3, 96, 40, 0, 1024, 1026, 3, 90, 37, 0, 1025, 1023, 1, 0, 0, 0, 1025, 1024, 1, 0, 0, 0, 1026, 1028, 1, 0, 0, 0, 1027, 1029, 3, 222, 103, 0, 1028, 1027, 1, 0, 0, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1028, 1, 0, 0, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1033, 1, 0, 0, 0, 1032, 1015, 1, 0, 0, 0, 1032, 1025, 1, 0, 0, 0, 1033, 225, 1, 0, 0, 0, 1034, 1037, 3, 224, 104, 0, 1035, 1037, 3, 184, 84, 0, 1036, 1034, 1, 0, 0, 0, 1036, 1035, 1, 0, 0, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1036, 1, 0, 0, 0, 1038, 1039, 1, 0, 0, 0, 1039, 227, 1, 0, 0, 0, 1040, 1041, 3, 58, 21, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 106, 11, 0, 1043, 229, 1, 0, 0, 0, 1044, 1045, 3, 60, 22, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 107, 11, 0, 1047, 231, 1, 0, 0, 0, 1048, 1049, 3, 62, 23, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 108, 11, 0, 1051, 233, 1, 0, 0, 0, 1052, 1053, 3, 78, 31, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 109, 14, 0, 1055, 1056, 6, 109, 15, 0, 1056, 235, 1, 0, 0, 0, 1057, 1058, 3, 112, 48, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 6, 110, 18, 0, 1060, 237, 1, 0, 0, 0, 1061, 1062, 3, 116, 50, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 111, 17, 0, 1064, 239, 1, 0, 0, 0, 1065, 1066, 3, 120, 52, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 112, 21, 0, 1068, 241, 1, 0, 0, 0, 1069, 1070, 5, 97, 0, 0, 1070, 1071, 5, 115, 0, 0, 1071, 243, 1, 0, 0, 0, 1072, 1073, 3, 226, 105, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 114, 22, 0, 1075, 245, 1, 0, 0, 0, 1076, 1077, 3, 58, 21, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 115, 11, 0, 1079, 247, 1, 0, 0, 0, 1080, 1081, 3, 60, 22, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 116, 11, 0, 1083, 249, 1, 0, 0, 0, 1084, 1085, 3, 62, 23, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 117, 11, 0, 1087, 251, 1, 0, 0, 0, 1088, 1089, 3, 78, 31, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 118, 14, 0, 1091, 1092, 6, 118, 15, 0, 1092, 253, 1, 0, 0, 0, 1093, 1094, 3, 178, 81, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 119, 12, 0, 1096, 1097, 6, 119, 23, 0, 1097, 255, 1, 0, 0, 0, 1098, 1099, 5, 111, 0, 0, 1099, 1100, 5, 110, 0, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 120, 24, 0, 1102, 257, 1, 0, 0, 0, 1103, 1104, 5, 119, 0, 0, 1104, 1105, 5, 105, 0, 0, 1105, 1106, 5, 116, 0, 0, 1106, 1107, 5, 104, 0, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 121, 24, 0, 1109, 259, 1, 0, 0, 0, 1110, 1111, 8, 12, 0, 0, 1111, 261, 1, 0, 0, 0, 1112, 1114, 3, 260, 122, 0, 1113, 1112, 1, 
0, 0, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1113, 1, 0, 0, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 3, 360, 172, 0, 1118, 1120, 1, 0, 0, 0, 1119, 1113, 1, 0, 0, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1122, 1, 0, 0, 0, 1121, 1123, 3, 260, 122, 0, 1122, 1121, 1, 0, 0, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1122, 1, 0, 0, 0, 1124, 1125, 1, 0, 0, 0, 1125, 263, 1, 0, 0, 0, 1126, 1127, 3, 186, 85, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 124, 25, 0, 1129, 265, 1, 0, 0, 0, 1130, 1131, 3, 262, 123, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1133, 6, 125, 26, 0, 1133, 267, 1, 0, 0, 0, 1134, 1135, 3, 58, 21, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1137, 6, 126, 11, 0, 1137, 269, 1, 0, 0, 0, 1138, 1139, 3, 60, 22, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 127, 11, 0, 1141, 271, 1, 0, 0, 0, 1142, 1143, 3, 62, 23, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 128, 11, 0, 1145, 273, 1, 0, 0, 0, 1146, 1147, 3, 78, 31, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 129, 14, 0, 1149, 1150, 6, 129, 15, 0, 1150, 1151, 6, 129, 15, 0, 1151, 275, 1, 0, 0, 0, 1152, 1153, 3, 112, 48, 0, 1153, 1154, 1, 0, 0, 0, 1154, 1155, 6, 130, 18, 0, 1155, 277, 1, 0, 0, 0, 1156, 1157, 3, 116, 50, 0, 1157, 1158, 1, 0, 0, 0, 1158, 1159, 6, 131, 17, 0, 1159, 279, 1, 0, 0, 0, 1160, 1161, 3, 120, 52, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 6, 132, 21, 0, 1163, 281, 1, 0, 0, 0, 1164, 1165, 3, 258, 121, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 133, 27, 0, 1167, 283, 1, 0, 0, 0, 1168, 1169, 3, 226, 105, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 134, 22, 0, 1171, 285, 1, 0, 0, 0, 1172, 1173, 3, 186, 85, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 135, 25, 0, 1175, 287, 1, 0, 0, 0, 1176, 1177, 3, 58, 21, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 136, 11, 0, 1179, 289, 1, 0, 0, 0, 1180, 1181, 3, 60, 22, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 137, 11, 0, 1183, 291, 1, 0, 0, 0, 1184, 1185, 3, 62, 23, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 138, 11, 0, 1187, 293, 1, 0, 0, 0, 1188, 1189, 3, 78, 31, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 139, 14, 0, 1191, 1192, 6, 139, 15, 0, 1192, 295, 1, 0, 0, 0, 1193, 1194, 3, 116, 50, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 140, 17, 0, 1196, 297, 1, 0, 0, 0, 1197, 1198, 3, 120, 52, 0, 1198, 1199, 1, 0, 0, 0, 1199, 1200, 6, 141, 21, 0, 1200, 299, 1, 0, 0, 0, 1201, 1202, 3, 256, 120, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 142, 28, 0, 1204, 1205, 6, 142, 29, 0, 1205, 301, 1, 0, 0, 0, 1206, 1207, 3, 66, 25, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 143, 20, 0, 1209, 303, 1, 0, 0, 0, 1210, 1211, 3, 58, 21, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 144, 11, 0, 1213, 305, 1, 0, 0, 0, 1214, 1215, 3, 60, 22, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 145, 11, 0, 1217, 307, 1, 0, 0, 0, 1218, 1219, 3, 62, 23, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 146, 11, 0, 1221, 309, 1, 0, 0, 0, 1222, 1223, 3, 78, 31, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 147, 14, 0, 1225, 1226, 6, 147, 15, 0, 1226, 1227, 6, 147, 15, 0, 1227, 311, 1, 0, 0, 0, 1228, 1229, 3, 116, 50, 0, 1229, 1230, 1, 0, 0, 0, 1230, 1231, 6, 148, 17, 0, 1231, 313, 1, 0, 0, 0, 1232, 1233, 3, 120, 52, 0, 1233, 1234, 1, 0, 0, 0, 1234, 1235, 6, 149, 21, 0, 1235, 315, 1, 0, 0, 0, 1236, 1237, 3, 226, 105, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 150, 22, 0, 1239, 317, 1, 0, 0, 0, 1240, 1241, 3, 58, 21, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1243, 6, 151, 11, 0, 1243, 319, 1, 0, 0, 0, 1244, 1245, 3, 60, 22, 0, 1245, 1246, 1, 0, 0, 0, 1246, 1247, 6, 152, 11, 0, 1247, 321, 1, 0, 0, 0, 1248, 1249, 3, 62, 23, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 153, 11, 0, 1251, 323, 1, 0, 0, 
0, 1252, 1253, 3, 78, 31, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 154, 14, 0, 1255, 1256, 6, 154, 15, 0, 1256, 325, 1, 0, 0, 0, 1257, 1258, 3, 120, 52, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 155, 21, 0, 1260, 327, 1, 0, 0, 0, 1261, 1262, 3, 186, 85, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 156, 25, 0, 1264, 329, 1, 0, 0, 0, 1265, 1266, 3, 182, 83, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 157, 30, 0, 1268, 331, 1, 0, 0, 0, 1269, 1270, 3, 58, 21, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 158, 11, 0, 1272, 333, 1, 0, 0, 0, 1273, 1274, 3, 60, 22, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 159, 11, 0, 1276, 335, 1, 0, 0, 0, 1277, 1278, 3, 62, 23, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 160, 11, 0, 1280, 337, 1, 0, 0, 0, 1281, 1282, 3, 78, 31, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 161, 14, 0, 1284, 1285, 6, 161, 15, 0, 1285, 339, 1, 0, 0, 0, 1286, 1287, 5, 105, 0, 0, 1287, 1288, 5, 110, 0, 0, 1288, 1289, 5, 102, 0, 0, 1289, 1290, 5, 111, 0, 0, 1290, 341, 1, 0, 0, 0, 1291, 1292, 3, 58, 21, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 163, 11, 0, 1294, 343, 1, 0, 0, 0, 1295, 1296, 3, 60, 22, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 164, 11, 0, 1298, 345, 1, 0, 0, 0, 1299, 1300, 3, 62, 23, 0, 1300, 1301, 1, 0, 0, 0, 1301, 1302, 6, 165, 11, 0, 1302, 347, 1, 0, 0, 0, 1303, 1304, 3, 78, 31, 0, 1304, 1305, 1, 0, 0, 0, 1305, 1306, 6, 166, 14, 0, 1306, 1307, 6, 166, 15, 0, 1307, 349, 1, 0, 0, 0, 1308, 1309, 5, 102, 0, 0, 1309, 1310, 5, 117, 0, 0, 1310, 1311, 5, 110, 0, 0, 1311, 1312, 5, 99, 0, 0, 1312, 1313, 5, 116, 0, 0, 1313, 1314, 5, 105, 0, 0, 1314, 1315, 5, 111, 0, 0, 1315, 1316, 5, 110, 0, 0, 1316, 1317, 5, 115, 0, 0, 1317, 351, 1, 0, 0, 0, 1318, 1319, 3, 58, 21, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1321, 6, 168, 11, 0, 1321, 353, 1, 0, 0, 0, 1322, 1323, 3, 60, 22, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 169, 11, 0, 1325, 355, 1, 0, 0, 0, 1326, 1327, 3, 62, 23, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 170, 11, 0, 1329, 357, 1, 0, 0, 0, 1330, 1331, 3, 180, 82, 0, 1331, 1332, 1, 0, 0, 0, 1332, 1333, 6, 171, 16, 0, 1333, 1334, 6, 171, 15, 0, 1334, 359, 1, 0, 0, 0, 1335, 1336, 5, 58, 0, 0, 1336, 361, 1, 0, 0, 0, 1337, 1343, 3, 90, 37, 0, 1338, 1343, 3, 80, 32, 0, 1339, 1343, 3, 120, 52, 0, 1340, 1343, 3, 82, 33, 0, 1341, 1343, 3, 96, 40, 0, 1342, 1337, 1, 0, 0, 0, 1342, 1338, 1, 0, 0, 0, 1342, 1339, 1, 0, 0, 0, 1342, 1340, 1, 0, 0, 0, 1342, 1341, 1, 0, 0, 0, 1343, 1344, 1, 0, 0, 0, 1344, 1342, 1, 0, 0, 0, 1344, 1345, 1, 0, 0, 0, 1345, 363, 1, 0, 0, 0, 1346, 1347, 3, 58, 21, 0, 1347, 1348, 1, 0, 0, 0, 1348, 1349, 6, 174, 11, 0, 1349, 365, 1, 0, 0, 0, 1350, 1351, 3, 60, 22, 0, 1351, 1352, 1, 0, 0, 0, 1352, 1353, 6, 175, 11, 0, 1353, 367, 1, 0, 0, 0, 1354, 1355, 3, 62, 23, 0, 1355, 1356, 1, 0, 0, 0, 1356, 1357, 6, 176, 11, 0, 1357, 369, 1, 0, 0, 0, 1358, 1359, 3, 78, 31, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1361, 6, 177, 14, 0, 1361, 1362, 6, 177, 15, 0, 1362, 371, 1, 0, 0, 0, 1363, 1364, 3, 66, 25, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 178, 20, 0, 1366, 1367, 6, 178, 15, 0, 1367, 1368, 6, 178, 31, 0, 1368, 373, 1, 0, 0, 0, 1369, 1370, 3, 58, 21, 0, 1370, 1371, 1, 0, 0, 0, 1371, 1372, 6, 179, 11, 0, 1372, 375, 1, 0, 0, 0, 1373, 1374, 3, 60, 22, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 180, 11, 0, 1376, 377, 1, 0, 0, 0, 1377, 1378, 3, 62, 23, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 181, 11, 0, 1380, 379, 1, 0, 0, 0, 1381, 1382, 3, 116, 50, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 182, 17, 0, 1384, 1385, 6, 182, 15, 0, 1385, 1386, 6, 182, 7, 0, 1386, 381, 1, 0, 0, 0, 1387, 1388, 3, 58, 21, 0, 
1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 183, 11, 0, 1390, 383, 1, 0, 0, 0, 1391, 1392, 3, 60, 22, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 184, 11, 0, 1394, 385, 1, 0, 0, 0, 1395, 1396, 3, 62, 23, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 185, 11, 0, 1398, 387, 1, 0, 0, 0, 1399, 1400, 3, 186, 85, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1402, 6, 186, 15, 0, 1402, 1403, 6, 186, 0, 0, 1403, 1404, 6, 186, 25, 0, 1404, 389, 1, 0, 0, 0, 1405, 1406, 3, 182, 83, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1408, 6, 187, 15, 0, 1408, 1409, 6, 187, 0, 0, 1409, 1410, 6, 187, 30, 0, 1410, 391, 1, 0, 0, 0, 1411, 1412, 3, 106, 45, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 188, 15, 0, 1414, 1415, 6, 188, 0, 0, 1415, 1416, 6, 188, 32, 0, 1416, 393, 1, 0, 0, 0, 1417, 1418, 3, 78, 31, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 189, 14, 0, 1420, 1421, 6, 189, 15, 0, 1421, 395, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 568, 578, 582, 585, 594, 596, 607, 614, 619, 658, 663, 672, 679, 684, 686, 697, 705, 708, 710, 715, 720, 726, 733, 738, 744, 747, 755, 759, 883, 890, 892, 908, 913, 918, 920, 926, 1011, 1015, 1020, 1025, 1030, 1032, 1036, 1038, 1115, 1119, 1124, 1342, 1344, 33, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 12, 0, 5, 14, 0, 5, 10, 0, 5, 5, 0, 5, 11, 0, 0, 1, 0, 7, 69, 0, 5, 0, 0, 7, 29, 0, 4, 0, 0, 7, 70, 0, 7, 38, 0, 7, 36, 0, 7, 30, 0, 7, 25, 0, 7, 40, 0, 7, 80, 0, 5, 13, 0, 5, 7, 0, 7, 72, 0, 7, 90, 0, 7, 89, 0, 7, 88, 0, 5, 9, 0, 7, 71, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 124, 1450, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 
132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 575, 8, 20, 11, 20, 12, 20, 576, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 585, 8, 21, 10, 21, 12, 21, 588, 9, 21, 1, 21, 3, 21, 591, 8, 21, 1, 21, 3, 21, 594, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 603, 8, 22, 10, 22, 12, 22, 606, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 4, 23, 614, 8, 23, 11, 23, 12, 23, 615, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 3, 24, 623, 8, 24, 1, 25, 4, 25, 626, 8, 25, 11, 25, 12, 25, 627, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 3, 36, 667, 8, 36, 1, 36, 4, 36, 670, 8, 36, 11, 36, 12, 36, 671, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 681, 8, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 3, 41, 688, 8, 41, 1, 42, 1, 42, 1, 42, 5, 42, 693, 8, 42, 10, 42, 12, 42, 696, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 704, 8, 42, 10, 42, 12, 42, 707, 9, 42, 1, 42, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 714, 8, 42, 1, 42, 3, 42, 717, 8, 42, 3, 42, 719, 8, 42, 1, 43, 4, 43, 722, 8, 43, 11, 43, 12, 43, 723, 1, 44, 4, 44, 727, 8, 44, 11, 44, 12, 44, 728, 1, 44, 1, 44, 5, 44, 733, 8, 44, 10, 44, 12, 44, 736, 9, 44, 1, 44, 1, 44, 4, 44, 740, 8, 44, 11, 44, 12, 44, 741, 1, 44, 4, 44, 745, 8, 44, 11, 44, 12, 44, 746, 1, 44, 1, 44, 5, 44, 751, 8, 44, 10, 44, 
12, 44, 754, 9, 44, 3, 44, 756, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 4, 44, 762, 8, 44, 11, 44, 12, 44, 763, 1, 44, 1, 44, 3, 44, 768, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 5, 80, 890, 8, 80, 10, 80, 12, 80, 893, 9, 80, 1, 80, 1, 80, 4, 80, 897, 8, 80, 11, 80, 12, 80, 898, 3, 80, 901, 8, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 5, 83, 915, 8, 83, 10, 83, 12, 83, 918, 9, 83, 1, 83, 1, 83, 3, 83, 922, 8, 83, 1, 83, 4, 83, 925, 8, 83, 11, 83, 12, 83, 926, 3, 83, 929, 8, 83, 1, 84, 1, 84, 4, 84, 933, 8, 84, 11, 84, 12, 84, 934, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 3, 104, 1024, 8, 104, 1, 105, 1, 105, 3, 105, 1028, 8, 105, 1, 105, 5, 105, 1031, 8, 105, 10, 105, 12, 105, 1034, 9, 105, 1, 105, 1, 105, 3, 105, 1038, 8, 105, 1, 105, 4, 105, 1041, 8, 105, 11, 105, 12, 105, 1042, 3, 105, 1045, 8, 105, 1, 106, 1, 106, 4, 106, 1049, 8, 106, 11, 106, 12, 106, 1050, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 124, 4, 124, 1126, 8, 124, 11, 124, 12, 124, 1127, 1, 124, 1, 124, 3, 124, 1132, 8, 124, 1, 124, 4, 124, 1135, 8, 124, 11, 124, 12, 124, 1136, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 
1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 4, 175, 1359, 8, 175, 11, 175, 12, 175, 1360, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 2, 604, 705, 0, 194, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 0, 66, 25, 68, 0, 70, 0, 72, 26, 74, 27, 76, 28, 78, 29, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 0, 98, 0, 100, 30, 102, 31, 104, 32, 106, 33, 108, 34, 110, 35, 112, 36, 114, 37, 116, 38, 118, 39, 120, 40, 122, 41, 124, 42, 126, 43, 128, 44, 130, 45, 132, 46, 134, 47, 136, 48, 138, 49, 140, 50, 142, 51, 144, 52, 146, 53, 148, 54, 150, 55, 152, 56, 154, 57, 156, 58, 158, 59, 160, 60, 162, 61, 164, 62, 166, 63, 168, 64, 170, 65, 172, 66, 174, 67, 176, 68, 178, 69, 180, 70, 182, 71, 184, 0, 186, 72, 188, 73, 190, 74, 192, 75, 194, 0, 196, 0, 198, 0, 200, 0, 202, 0, 204, 0, 206, 76, 208, 0, 210, 0, 212, 77, 214, 78, 216, 79, 218, 0, 220, 0, 222, 0, 224, 0, 226, 0, 228, 80, 230, 81, 232, 82, 234, 83, 236, 0, 238, 0, 240, 0, 242, 0, 244, 84, 246, 0, 248, 85, 250, 86, 252, 87, 254, 0, 256, 0, 258, 88, 260, 89, 262, 0, 264, 90, 266, 0, 268, 91, 270, 92, 272, 93, 274, 0, 276, 0, 278, 0, 280, 0, 282, 0, 284, 0, 286, 0, 288, 94, 290, 95, 292, 96, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 97, 310, 98, 312, 99, 314, 0, 316, 0, 318, 0, 320, 0, 322, 100, 324, 101, 326, 102, 328, 0, 330, 0, 332, 0, 334, 0, 336, 103, 338, 104, 340, 105, 342, 0, 344, 106, 346, 107, 348, 108, 350, 109, 352, 0, 354, 110, 356, 111, 358, 112, 360, 113, 362, 0, 364, 114, 366, 115, 368, 116, 370, 117, 372, 118, 374, 0, 376, 0, 378, 0, 
380, 119, 382, 120, 384, 121, 386, 0, 388, 0, 390, 122, 392, 123, 394, 124, 396, 0, 398, 0, 400, 0, 402, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1476, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 1, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 74, 1, 0, 0, 0, 1, 76, 1, 0, 0, 0, 2, 78, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 2, 180, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 2, 188, 1, 0, 0, 0, 2, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 5, 248, 1, 0, 0, 0, 5, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 6, 270, 1, 0, 0, 0, 6, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 7, 290, 1, 0, 0, 0, 7, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 8, 304, 1, 0, 0, 0, 8, 306, 1, 0, 0, 0, 8, 308, 1, 0, 0, 0, 8, 310, 1, 0, 0, 0, 8, 312, 1, 0, 0, 0, 9, 314, 1, 0, 0, 0, 9, 316, 1, 0, 0, 0, 9, 318, 1, 0, 0, 0, 9, 320, 1, 0, 0, 0, 9, 322, 1, 0, 0, 0, 9, 324, 1, 0, 0, 0, 9, 326, 1, 0, 0, 0, 10, 328, 1, 0, 0, 0, 10, 330, 1, 0, 0, 0, 10, 332, 1, 0, 0, 0, 10, 334, 1, 0, 0, 0, 
10, 336, 1, 0, 0, 0, 10, 338, 1, 0, 0, 0, 10, 340, 1, 0, 0, 0, 11, 342, 1, 0, 0, 0, 11, 344, 1, 0, 0, 0, 11, 346, 1, 0, 0, 0, 11, 348, 1, 0, 0, 0, 11, 350, 1, 0, 0, 0, 12, 352, 1, 0, 0, 0, 12, 354, 1, 0, 0, 0, 12, 356, 1, 0, 0, 0, 12, 358, 1, 0, 0, 0, 12, 360, 1, 0, 0, 0, 13, 362, 1, 0, 0, 0, 13, 364, 1, 0, 0, 0, 13, 366, 1, 0, 0, 0, 13, 368, 1, 0, 0, 0, 13, 370, 1, 0, 0, 0, 13, 372, 1, 0, 0, 0, 14, 374, 1, 0, 0, 0, 14, 376, 1, 0, 0, 0, 14, 378, 1, 0, 0, 0, 14, 380, 1, 0, 0, 0, 14, 382, 1, 0, 0, 0, 14, 384, 1, 0, 0, 0, 15, 386, 1, 0, 0, 0, 15, 388, 1, 0, 0, 0, 15, 390, 1, 0, 0, 0, 15, 392, 1, 0, 0, 0, 15, 394, 1, 0, 0, 0, 15, 396, 1, 0, 0, 0, 15, 398, 1, 0, 0, 0, 15, 400, 1, 0, 0, 0, 15, 402, 1, 0, 0, 0, 16, 404, 1, 0, 0, 0, 18, 414, 1, 0, 0, 0, 20, 421, 1, 0, 0, 0, 22, 430, 1, 0, 0, 0, 24, 437, 1, 0, 0, 0, 26, 447, 1, 0, 0, 0, 28, 454, 1, 0, 0, 0, 30, 461, 1, 0, 0, 0, 32, 475, 1, 0, 0, 0, 34, 482, 1, 0, 0, 0, 36, 490, 1, 0, 0, 0, 38, 499, 1, 0, 0, 0, 40, 506, 1, 0, 0, 0, 42, 516, 1, 0, 0, 0, 44, 528, 1, 0, 0, 0, 46, 537, 1, 0, 0, 0, 48, 543, 1, 0, 0, 0, 50, 550, 1, 0, 0, 0, 52, 557, 1, 0, 0, 0, 54, 565, 1, 0, 0, 0, 56, 574, 1, 0, 0, 0, 58, 580, 1, 0, 0, 0, 60, 597, 1, 0, 0, 0, 62, 613, 1, 0, 0, 0, 64, 622, 1, 0, 0, 0, 66, 625, 1, 0, 0, 0, 68, 629, 1, 0, 0, 0, 70, 634, 1, 0, 0, 0, 72, 639, 1, 0, 0, 0, 74, 643, 1, 0, 0, 0, 76, 647, 1, 0, 0, 0, 78, 651, 1, 0, 0, 0, 80, 655, 1, 0, 0, 0, 82, 657, 1, 0, 0, 0, 84, 659, 1, 0, 0, 0, 86, 662, 1, 0, 0, 0, 88, 664, 1, 0, 0, 0, 90, 673, 1, 0, 0, 0, 92, 675, 1, 0, 0, 0, 94, 680, 1, 0, 0, 0, 96, 682, 1, 0, 0, 0, 98, 687, 1, 0, 0, 0, 100, 718, 1, 0, 0, 0, 102, 721, 1, 0, 0, 0, 104, 767, 1, 0, 0, 0, 106, 769, 1, 0, 0, 0, 108, 772, 1, 0, 0, 0, 110, 776, 1, 0, 0, 0, 112, 780, 1, 0, 0, 0, 114, 782, 1, 0, 0, 0, 116, 785, 1, 0, 0, 0, 118, 787, 1, 0, 0, 0, 120, 792, 1, 0, 0, 0, 122, 794, 1, 0, 0, 0, 124, 800, 1, 0, 0, 0, 126, 806, 1, 0, 0, 0, 128, 811, 1, 0, 0, 0, 130, 813, 1, 0, 0, 0, 132, 816, 1, 0, 0, 0, 134, 819, 1, 0, 0, 0, 136, 824, 1, 0, 0, 0, 138, 828, 1, 0, 0, 0, 140, 833, 1, 0, 0, 0, 142, 839, 1, 0, 0, 0, 144, 842, 1, 0, 0, 0, 146, 844, 1, 0, 0, 0, 148, 850, 1, 0, 0, 0, 150, 852, 1, 0, 0, 0, 152, 857, 1, 0, 0, 0, 154, 860, 1, 0, 0, 0, 156, 863, 1, 0, 0, 0, 158, 866, 1, 0, 0, 0, 160, 868, 1, 0, 0, 0, 162, 871, 1, 0, 0, 0, 164, 873, 1, 0, 0, 0, 166, 876, 1, 0, 0, 0, 168, 878, 1, 0, 0, 0, 170, 880, 1, 0, 0, 0, 172, 882, 1, 0, 0, 0, 174, 884, 1, 0, 0, 0, 176, 900, 1, 0, 0, 0, 178, 902, 1, 0, 0, 0, 180, 907, 1, 0, 0, 0, 182, 928, 1, 0, 0, 0, 184, 930, 1, 0, 0, 0, 186, 938, 1, 0, 0, 0, 188, 940, 1, 0, 0, 0, 190, 944, 1, 0, 0, 0, 192, 948, 1, 0, 0, 0, 194, 952, 1, 0, 0, 0, 196, 957, 1, 0, 0, 0, 198, 961, 1, 0, 0, 0, 200, 965, 1, 0, 0, 0, 202, 969, 1, 0, 0, 0, 204, 973, 1, 0, 0, 0, 206, 977, 1, 0, 0, 0, 208, 986, 1, 0, 0, 0, 210, 990, 1, 0, 0, 0, 212, 994, 1, 0, 0, 0, 214, 998, 1, 0, 0, 0, 216, 1002, 1, 0, 0, 0, 218, 1006, 1, 0, 0, 0, 220, 1011, 1, 0, 0, 0, 222, 1015, 1, 0, 0, 0, 224, 1023, 1, 0, 0, 0, 226, 1044, 1, 0, 0, 0, 228, 1048, 1, 0, 0, 0, 230, 1052, 1, 0, 0, 0, 232, 1056, 1, 0, 0, 0, 234, 1060, 1, 0, 0, 0, 236, 1064, 1, 0, 0, 0, 238, 1069, 1, 0, 0, 0, 240, 1073, 1, 0, 0, 0, 242, 1077, 1, 0, 0, 0, 244, 1081, 1, 0, 0, 0, 246, 1084, 1, 0, 0, 0, 248, 1088, 1, 0, 0, 0, 250, 1092, 1, 0, 0, 0, 252, 1096, 1, 0, 0, 0, 254, 1100, 1, 0, 0, 0, 256, 1105, 1, 0, 0, 0, 258, 1110, 1, 0, 0, 0, 260, 1115, 1, 0, 0, 0, 262, 1122, 1, 0, 0, 0, 264, 1131, 1, 0, 0, 0, 266, 1138, 1, 0, 0, 0, 268, 1142, 1, 0, 0, 0, 270, 1146, 1, 0, 0, 0, 272, 1150, 1, 0, 0, 0, 274, 1154, 1, 
0, 0, 0, 276, 1160, 1, 0, 0, 0, 278, 1164, 1, 0, 0, 0, 280, 1168, 1, 0, 0, 0, 282, 1172, 1, 0, 0, 0, 284, 1176, 1, 0, 0, 0, 286, 1180, 1, 0, 0, 0, 288, 1184, 1, 0, 0, 0, 290, 1188, 1, 0, 0, 0, 292, 1192, 1, 0, 0, 0, 294, 1196, 1, 0, 0, 0, 296, 1201, 1, 0, 0, 0, 298, 1205, 1, 0, 0, 0, 300, 1209, 1, 0, 0, 0, 302, 1213, 1, 0, 0, 0, 304, 1218, 1, 0, 0, 0, 306, 1222, 1, 0, 0, 0, 308, 1226, 1, 0, 0, 0, 310, 1230, 1, 0, 0, 0, 312, 1234, 1, 0, 0, 0, 314, 1238, 1, 0, 0, 0, 316, 1244, 1, 0, 0, 0, 318, 1248, 1, 0, 0, 0, 320, 1252, 1, 0, 0, 0, 322, 1256, 1, 0, 0, 0, 324, 1260, 1, 0, 0, 0, 326, 1264, 1, 0, 0, 0, 328, 1268, 1, 0, 0, 0, 330, 1273, 1, 0, 0, 0, 332, 1277, 1, 0, 0, 0, 334, 1281, 1, 0, 0, 0, 336, 1285, 1, 0, 0, 0, 338, 1289, 1, 0, 0, 0, 340, 1293, 1, 0, 0, 0, 342, 1297, 1, 0, 0, 0, 344, 1302, 1, 0, 0, 0, 346, 1307, 1, 0, 0, 0, 348, 1311, 1, 0, 0, 0, 350, 1315, 1, 0, 0, 0, 352, 1319, 1, 0, 0, 0, 354, 1324, 1, 0, 0, 0, 356, 1334, 1, 0, 0, 0, 358, 1338, 1, 0, 0, 0, 360, 1342, 1, 0, 0, 0, 362, 1346, 1, 0, 0, 0, 364, 1351, 1, 0, 0, 0, 366, 1358, 1, 0, 0, 0, 368, 1362, 1, 0, 0, 0, 370, 1366, 1, 0, 0, 0, 372, 1370, 1, 0, 0, 0, 374, 1374, 1, 0, 0, 0, 376, 1379, 1, 0, 0, 0, 378, 1385, 1, 0, 0, 0, 380, 1391, 1, 0, 0, 0, 382, 1395, 1, 0, 0, 0, 384, 1399, 1, 0, 0, 0, 386, 1403, 1, 0, 0, 0, 388, 1409, 1, 0, 0, 0, 390, 1415, 1, 0, 0, 0, 392, 1419, 1, 0, 0, 0, 394, 1423, 1, 0, 0, 0, 396, 1427, 1, 0, 0, 0, 398, 1433, 1, 0, 0, 0, 400, 1439, 1, 0, 0, 0, 402, 1445, 1, 0, 0, 0, 404, 405, 5, 100, 0, 0, 405, 406, 5, 105, 0, 0, 406, 407, 5, 115, 0, 0, 407, 408, 5, 115, 0, 0, 408, 409, 5, 101, 0, 0, 409, 410, 5, 99, 0, 0, 410, 411, 5, 116, 0, 0, 411, 412, 1, 0, 0, 0, 412, 413, 6, 0, 0, 0, 413, 17, 1, 0, 0, 0, 414, 415, 5, 100, 0, 0, 415, 416, 5, 114, 0, 0, 416, 417, 5, 111, 0, 0, 417, 418, 5, 112, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 1, 1, 0, 420, 19, 1, 0, 0, 0, 421, 422, 5, 101, 0, 0, 422, 423, 5, 110, 0, 0, 423, 424, 5, 114, 0, 0, 424, 425, 5, 105, 0, 0, 425, 426, 5, 99, 0, 0, 426, 427, 5, 104, 0, 0, 427, 428, 1, 0, 0, 0, 428, 429, 6, 2, 2, 0, 429, 21, 1, 0, 0, 0, 430, 431, 5, 101, 0, 0, 431, 432, 5, 118, 0, 0, 432, 433, 5, 97, 0, 0, 433, 434, 5, 108, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 3, 0, 0, 436, 23, 1, 0, 0, 0, 437, 438, 5, 101, 0, 0, 438, 439, 5, 120, 0, 0, 439, 440, 5, 112, 0, 0, 440, 441, 5, 108, 0, 0, 441, 442, 5, 97, 0, 0, 442, 443, 5, 105, 0, 0, 443, 444, 5, 110, 0, 0, 444, 445, 1, 0, 0, 0, 445, 446, 6, 4, 3, 0, 446, 25, 1, 0, 0, 0, 447, 448, 5, 102, 0, 0, 448, 449, 5, 114, 0, 0, 449, 450, 5, 111, 0, 0, 450, 451, 5, 109, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 5, 4, 0, 453, 27, 1, 0, 0, 0, 454, 455, 5, 103, 0, 0, 455, 456, 5, 114, 0, 0, 456, 457, 5, 111, 0, 0, 457, 458, 5, 107, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 6, 0, 0, 460, 29, 1, 0, 0, 0, 461, 462, 5, 105, 0, 0, 462, 463, 5, 110, 0, 0, 463, 464, 5, 108, 0, 0, 464, 465, 5, 105, 0, 0, 465, 466, 5, 110, 0, 0, 466, 467, 5, 101, 0, 0, 467, 468, 5, 115, 0, 0, 468, 469, 5, 116, 0, 0, 469, 470, 5, 97, 0, 0, 470, 471, 5, 116, 0, 0, 471, 472, 5, 115, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 7, 0, 0, 474, 31, 1, 0, 0, 0, 475, 476, 5, 107, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 101, 0, 0, 478, 479, 5, 112, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 8, 1, 0, 481, 33, 1, 0, 0, 0, 482, 483, 5, 108, 0, 0, 483, 484, 5, 105, 0, 0, 484, 485, 5, 109, 0, 0, 485, 486, 5, 105, 0, 0, 486, 487, 5, 116, 0, 0, 487, 488, 1, 0, 0, 0, 488, 489, 6, 9, 0, 0, 489, 35, 1, 0, 0, 0, 490, 491, 5, 108, 0, 0, 491, 492, 5, 111, 0, 0, 492, 493, 5, 111, 0, 0, 493, 494, 5, 
107, 0, 0, 494, 495, 5, 117, 0, 0, 495, 496, 5, 112, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 10, 5, 0, 498, 37, 1, 0, 0, 0, 499, 500, 5, 109, 0, 0, 500, 501, 5, 101, 0, 0, 501, 502, 5, 116, 0, 0, 502, 503, 5, 97, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 11, 6, 0, 505, 39, 1, 0, 0, 0, 506, 507, 5, 109, 0, 0, 507, 508, 5, 101, 0, 0, 508, 509, 5, 116, 0, 0, 509, 510, 5, 114, 0, 0, 510, 511, 5, 105, 0, 0, 511, 512, 5, 99, 0, 0, 512, 513, 5, 115, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 12, 7, 0, 515, 41, 1, 0, 0, 0, 516, 517, 5, 109, 0, 0, 517, 518, 5, 118, 0, 0, 518, 519, 5, 95, 0, 0, 519, 520, 5, 101, 0, 0, 520, 521, 5, 120, 0, 0, 521, 522, 5, 112, 0, 0, 522, 523, 5, 97, 0, 0, 523, 524, 5, 110, 0, 0, 524, 525, 5, 100, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 13, 8, 0, 527, 43, 1, 0, 0, 0, 528, 529, 5, 114, 0, 0, 529, 530, 5, 101, 0, 0, 530, 531, 5, 110, 0, 0, 531, 532, 5, 97, 0, 0, 532, 533, 5, 109, 0, 0, 533, 534, 5, 101, 0, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 14, 9, 0, 536, 45, 1, 0, 0, 0, 537, 538, 5, 114, 0, 0, 538, 539, 5, 111, 0, 0, 539, 540, 5, 119, 0, 0, 540, 541, 1, 0, 0, 0, 541, 542, 6, 15, 0, 0, 542, 47, 1, 0, 0, 0, 543, 544, 5, 115, 0, 0, 544, 545, 5, 104, 0, 0, 545, 546, 5, 111, 0, 0, 546, 547, 5, 119, 0, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 16, 10, 0, 549, 49, 1, 0, 0, 0, 550, 551, 5, 115, 0, 0, 551, 552, 5, 111, 0, 0, 552, 553, 5, 114, 0, 0, 553, 554, 5, 116, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 17, 0, 0, 556, 51, 1, 0, 0, 0, 557, 558, 5, 115, 0, 0, 558, 559, 5, 116, 0, 0, 559, 560, 5, 97, 0, 0, 560, 561, 5, 116, 0, 0, 561, 562, 5, 115, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 18, 0, 0, 564, 53, 1, 0, 0, 0, 565, 566, 5, 119, 0, 0, 566, 567, 5, 104, 0, 0, 567, 568, 5, 101, 0, 0, 568, 569, 5, 114, 0, 0, 569, 570, 5, 101, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 19, 0, 0, 572, 55, 1, 0, 0, 0, 573, 575, 8, 0, 0, 0, 574, 573, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 578, 1, 0, 0, 0, 578, 579, 6, 20, 0, 0, 579, 57, 1, 0, 0, 0, 580, 581, 5, 47, 0, 0, 581, 582, 5, 47, 0, 0, 582, 586, 1, 0, 0, 0, 583, 585, 8, 1, 0, 0, 584, 583, 1, 0, 0, 0, 585, 588, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 587, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 589, 591, 5, 13, 0, 0, 590, 589, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 593, 1, 0, 0, 0, 592, 594, 5, 10, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 6, 21, 11, 0, 596, 59, 1, 0, 0, 0, 597, 598, 5, 47, 0, 0, 598, 599, 5, 42, 0, 0, 599, 604, 1, 0, 0, 0, 600, 603, 3, 60, 22, 0, 601, 603, 9, 0, 0, 0, 602, 600, 1, 0, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 42, 0, 0, 608, 609, 5, 47, 0, 0, 609, 610, 1, 0, 0, 0, 610, 611, 6, 22, 11, 0, 611, 61, 1, 0, 0, 0, 612, 614, 7, 2, 0, 0, 613, 612, 1, 0, 0, 0, 614, 615, 1, 0, 0, 0, 615, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 23, 11, 0, 618, 63, 1, 0, 0, 0, 619, 623, 8, 3, 0, 0, 620, 621, 5, 47, 0, 0, 621, 623, 8, 4, 0, 0, 622, 619, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 623, 65, 1, 0, 0, 0, 624, 626, 3, 64, 24, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 67, 1, 0, 0, 0, 629, 630, 3, 178, 81, 0, 630, 631, 1, 0, 0, 0, 631, 632, 6, 26, 12, 0, 632, 633, 6, 26, 13, 0, 633, 69, 1, 0, 0, 0, 634, 635, 3, 78, 31, 0, 635, 636, 1, 0, 0, 0, 636, 637, 6, 27, 14, 0, 637, 638, 6, 27, 15, 0, 638, 71, 1, 0, 0, 0, 639, 640, 3, 62, 23, 0, 640, 641, 1, 
0, 0, 0, 641, 642, 6, 28, 11, 0, 642, 73, 1, 0, 0, 0, 643, 644, 3, 58, 21, 0, 644, 645, 1, 0, 0, 0, 645, 646, 6, 29, 11, 0, 646, 75, 1, 0, 0, 0, 647, 648, 3, 60, 22, 0, 648, 649, 1, 0, 0, 0, 649, 650, 6, 30, 11, 0, 650, 77, 1, 0, 0, 0, 651, 652, 5, 124, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 6, 31, 15, 0, 654, 79, 1, 0, 0, 0, 655, 656, 7, 5, 0, 0, 656, 81, 1, 0, 0, 0, 657, 658, 7, 6, 0, 0, 658, 83, 1, 0, 0, 0, 659, 660, 5, 92, 0, 0, 660, 661, 7, 7, 0, 0, 661, 85, 1, 0, 0, 0, 662, 663, 8, 8, 0, 0, 663, 87, 1, 0, 0, 0, 664, 666, 7, 9, 0, 0, 665, 667, 7, 10, 0, 0, 666, 665, 1, 0, 0, 0, 666, 667, 1, 0, 0, 0, 667, 669, 1, 0, 0, 0, 668, 670, 3, 80, 32, 0, 669, 668, 1, 0, 0, 0, 670, 671, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 89, 1, 0, 0, 0, 673, 674, 5, 64, 0, 0, 674, 91, 1, 0, 0, 0, 675, 676, 5, 96, 0, 0, 676, 93, 1, 0, 0, 0, 677, 681, 8, 11, 0, 0, 678, 679, 5, 96, 0, 0, 679, 681, 5, 96, 0, 0, 680, 677, 1, 0, 0, 0, 680, 678, 1, 0, 0, 0, 681, 95, 1, 0, 0, 0, 682, 683, 5, 95, 0, 0, 683, 97, 1, 0, 0, 0, 684, 688, 3, 82, 33, 0, 685, 688, 3, 80, 32, 0, 686, 688, 3, 96, 40, 0, 687, 684, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 687, 686, 1, 0, 0, 0, 688, 99, 1, 0, 0, 0, 689, 694, 5, 34, 0, 0, 690, 693, 3, 84, 34, 0, 691, 693, 3, 86, 35, 0, 692, 690, 1, 0, 0, 0, 692, 691, 1, 0, 0, 0, 693, 696, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 697, 1, 0, 0, 0, 696, 694, 1, 0, 0, 0, 697, 719, 5, 34, 0, 0, 698, 699, 5, 34, 0, 0, 699, 700, 5, 34, 0, 0, 700, 701, 5, 34, 0, 0, 701, 705, 1, 0, 0, 0, 702, 704, 8, 1, 0, 0, 703, 702, 1, 0, 0, 0, 704, 707, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 706, 708, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 708, 709, 5, 34, 0, 0, 709, 710, 5, 34, 0, 0, 710, 711, 5, 34, 0, 0, 711, 713, 1, 0, 0, 0, 712, 714, 5, 34, 0, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 716, 1, 0, 0, 0, 715, 717, 5, 34, 0, 0, 716, 715, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 719, 1, 0, 0, 0, 718, 689, 1, 0, 0, 0, 718, 698, 1, 0, 0, 0, 719, 101, 1, 0, 0, 0, 720, 722, 3, 80, 32, 0, 721, 720, 1, 0, 0, 0, 722, 723, 1, 0, 0, 0, 723, 721, 1, 0, 0, 0, 723, 724, 1, 0, 0, 0, 724, 103, 1, 0, 0, 0, 725, 727, 3, 80, 32, 0, 726, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 726, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 1, 0, 0, 0, 730, 734, 3, 120, 52, 0, 731, 733, 3, 80, 32, 0, 732, 731, 1, 0, 0, 0, 733, 736, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 735, 1, 0, 0, 0, 735, 768, 1, 0, 0, 0, 736, 734, 1, 0, 0, 0, 737, 739, 3, 120, 52, 0, 738, 740, 3, 80, 32, 0, 739, 738, 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 768, 1, 0, 0, 0, 743, 745, 3, 80, 32, 0, 744, 743, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 744, 1, 0, 0, 0, 746, 747, 1, 0, 0, 0, 747, 755, 1, 0, 0, 0, 748, 752, 3, 120, 52, 0, 749, 751, 3, 80, 32, 0, 750, 749, 1, 0, 0, 0, 751, 754, 1, 0, 0, 0, 752, 750, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 753, 756, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 755, 748, 1, 0, 0, 0, 755, 756, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 757, 758, 3, 88, 36, 0, 758, 768, 1, 0, 0, 0, 759, 761, 3, 120, 52, 0, 760, 762, 3, 80, 32, 0, 761, 760, 1, 0, 0, 0, 762, 763, 1, 0, 0, 0, 763, 761, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 766, 3, 88, 36, 0, 766, 768, 1, 0, 0, 0, 767, 726, 1, 0, 0, 0, 767, 737, 1, 0, 0, 0, 767, 744, 1, 0, 0, 0, 767, 759, 1, 0, 0, 0, 768, 105, 1, 0, 0, 0, 769, 770, 5, 98, 0, 0, 770, 771, 5, 121, 0, 0, 771, 107, 1, 0, 0, 0, 772, 773, 5, 97, 0, 0, 773, 774, 5, 110, 0, 0, 774, 775, 5, 100, 0, 0, 775, 109, 1, 0, 0, 0, 776, 777, 5, 97, 0, 0, 
777, 778, 5, 115, 0, 0, 778, 779, 5, 99, 0, 0, 779, 111, 1, 0, 0, 0, 780, 781, 5, 61, 0, 0, 781, 113, 1, 0, 0, 0, 782, 783, 5, 58, 0, 0, 783, 784, 5, 58, 0, 0, 784, 115, 1, 0, 0, 0, 785, 786, 5, 44, 0, 0, 786, 117, 1, 0, 0, 0, 787, 788, 5, 100, 0, 0, 788, 789, 5, 101, 0, 0, 789, 790, 5, 115, 0, 0, 790, 791, 5, 99, 0, 0, 791, 119, 1, 0, 0, 0, 792, 793, 5, 46, 0, 0, 793, 121, 1, 0, 0, 0, 794, 795, 5, 102, 0, 0, 795, 796, 5, 97, 0, 0, 796, 797, 5, 108, 0, 0, 797, 798, 5, 115, 0, 0, 798, 799, 5, 101, 0, 0, 799, 123, 1, 0, 0, 0, 800, 801, 5, 102, 0, 0, 801, 802, 5, 105, 0, 0, 802, 803, 5, 114, 0, 0, 803, 804, 5, 115, 0, 0, 804, 805, 5, 116, 0, 0, 805, 125, 1, 0, 0, 0, 806, 807, 5, 108, 0, 0, 807, 808, 5, 97, 0, 0, 808, 809, 5, 115, 0, 0, 809, 810, 5, 116, 0, 0, 810, 127, 1, 0, 0, 0, 811, 812, 5, 40, 0, 0, 812, 129, 1, 0, 0, 0, 813, 814, 5, 105, 0, 0, 814, 815, 5, 110, 0, 0, 815, 131, 1, 0, 0, 0, 816, 817, 5, 105, 0, 0, 817, 818, 5, 115, 0, 0, 818, 133, 1, 0, 0, 0, 819, 820, 5, 108, 0, 0, 820, 821, 5, 105, 0, 0, 821, 822, 5, 107, 0, 0, 822, 823, 5, 101, 0, 0, 823, 135, 1, 0, 0, 0, 824, 825, 5, 110, 0, 0, 825, 826, 5, 111, 0, 0, 826, 827, 5, 116, 0, 0, 827, 137, 1, 0, 0, 0, 828, 829, 5, 110, 0, 0, 829, 830, 5, 117, 0, 0, 830, 831, 5, 108, 0, 0, 831, 832, 5, 108, 0, 0, 832, 139, 1, 0, 0, 0, 833, 834, 5, 110, 0, 0, 834, 835, 5, 117, 0, 0, 835, 836, 5, 108, 0, 0, 836, 837, 5, 108, 0, 0, 837, 838, 5, 115, 0, 0, 838, 141, 1, 0, 0, 0, 839, 840, 5, 111, 0, 0, 840, 841, 5, 114, 0, 0, 841, 143, 1, 0, 0, 0, 842, 843, 5, 63, 0, 0, 843, 145, 1, 0, 0, 0, 844, 845, 5, 114, 0, 0, 845, 846, 5, 108, 0, 0, 846, 847, 5, 105, 0, 0, 847, 848, 5, 107, 0, 0, 848, 849, 5, 101, 0, 0, 849, 147, 1, 0, 0, 0, 850, 851, 5, 41, 0, 0, 851, 149, 1, 0, 0, 0, 852, 853, 5, 116, 0, 0, 853, 854, 5, 114, 0, 0, 854, 855, 5, 117, 0, 0, 855, 856, 5, 101, 0, 0, 856, 151, 1, 0, 0, 0, 857, 858, 5, 61, 0, 0, 858, 859, 5, 61, 0, 0, 859, 153, 1, 0, 0, 0, 860, 861, 5, 61, 0, 0, 861, 862, 5, 126, 0, 0, 862, 155, 1, 0, 0, 0, 863, 864, 5, 33, 0, 0, 864, 865, 5, 61, 0, 0, 865, 157, 1, 0, 0, 0, 866, 867, 5, 60, 0, 0, 867, 159, 1, 0, 0, 0, 868, 869, 5, 60, 0, 0, 869, 870, 5, 61, 0, 0, 870, 161, 1, 0, 0, 0, 871, 872, 5, 62, 0, 0, 872, 163, 1, 0, 0, 0, 873, 874, 5, 62, 0, 0, 874, 875, 5, 61, 0, 0, 875, 165, 1, 0, 0, 0, 876, 877, 5, 43, 0, 0, 877, 167, 1, 0, 0, 0, 878, 879, 5, 45, 0, 0, 879, 169, 1, 0, 0, 0, 880, 881, 5, 42, 0, 0, 881, 171, 1, 0, 0, 0, 882, 883, 5, 47, 0, 0, 883, 173, 1, 0, 0, 0, 884, 885, 5, 37, 0, 0, 885, 175, 1, 0, 0, 0, 886, 887, 3, 144, 64, 0, 887, 891, 3, 82, 33, 0, 888, 890, 3, 98, 41, 0, 889, 888, 1, 0, 0, 0, 890, 893, 1, 0, 0, 0, 891, 889, 1, 0, 0, 0, 891, 892, 1, 0, 0, 0, 892, 901, 1, 0, 0, 0, 893, 891, 1, 0, 0, 0, 894, 896, 3, 144, 64, 0, 895, 897, 3, 80, 32, 0, 896, 895, 1, 0, 0, 0, 897, 898, 1, 0, 0, 0, 898, 896, 1, 0, 0, 0, 898, 899, 1, 0, 0, 0, 899, 901, 1, 0, 0, 0, 900, 886, 1, 0, 0, 0, 900, 894, 1, 0, 0, 0, 901, 177, 1, 0, 0, 0, 902, 903, 5, 91, 0, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 81, 0, 0, 905, 906, 6, 81, 0, 0, 906, 179, 1, 0, 0, 0, 907, 908, 5, 93, 0, 0, 908, 909, 1, 0, 0, 0, 909, 910, 6, 82, 15, 0, 910, 911, 6, 82, 15, 0, 911, 181, 1, 0, 0, 0, 912, 916, 3, 82, 33, 0, 913, 915, 3, 98, 41, 0, 914, 913, 1, 0, 0, 0, 915, 918, 1, 0, 0, 0, 916, 914, 1, 0, 0, 0, 916, 917, 1, 0, 0, 0, 917, 929, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 919, 922, 3, 96, 40, 0, 920, 922, 3, 90, 37, 0, 921, 919, 1, 0, 0, 0, 921, 920, 1, 0, 0, 0, 922, 924, 1, 0, 0, 0, 923, 925, 3, 98, 41, 0, 924, 923, 1, 0, 0, 0, 925, 926, 1, 0, 0, 0, 926, 924, 1, 
0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 929, 1, 0, 0, 0, 928, 912, 1, 0, 0, 0, 928, 921, 1, 0, 0, 0, 929, 183, 1, 0, 0, 0, 930, 932, 3, 92, 38, 0, 931, 933, 3, 94, 39, 0, 932, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 932, 1, 0, 0, 0, 934, 935, 1, 0, 0, 0, 935, 936, 1, 0, 0, 0, 936, 937, 3, 92, 38, 0, 937, 185, 1, 0, 0, 0, 938, 939, 3, 184, 84, 0, 939, 187, 1, 0, 0, 0, 940, 941, 3, 58, 21, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 86, 11, 0, 943, 189, 1, 0, 0, 0, 944, 945, 3, 60, 22, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 87, 11, 0, 947, 191, 1, 0, 0, 0, 948, 949, 3, 62, 23, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 88, 11, 0, 951, 193, 1, 0, 0, 0, 952, 953, 3, 78, 31, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 89, 14, 0, 955, 956, 6, 89, 15, 0, 956, 195, 1, 0, 0, 0, 957, 958, 3, 178, 81, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 90, 12, 0, 960, 197, 1, 0, 0, 0, 961, 962, 3, 180, 82, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 91, 16, 0, 964, 199, 1, 0, 0, 0, 965, 966, 3, 364, 174, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 92, 17, 0, 968, 201, 1, 0, 0, 0, 969, 970, 3, 116, 50, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 93, 18, 0, 972, 203, 1, 0, 0, 0, 973, 974, 3, 112, 48, 0, 974, 975, 1, 0, 0, 0, 975, 976, 6, 94, 19, 0, 976, 205, 1, 0, 0, 0, 977, 978, 5, 109, 0, 0, 978, 979, 5, 101, 0, 0, 979, 980, 5, 116, 0, 0, 980, 981, 5, 97, 0, 0, 981, 982, 5, 100, 0, 0, 982, 983, 5, 97, 0, 0, 983, 984, 5, 116, 0, 0, 984, 985, 5, 97, 0, 0, 985, 207, 1, 0, 0, 0, 986, 987, 3, 66, 25, 0, 987, 988, 1, 0, 0, 0, 988, 989, 6, 96, 20, 0, 989, 209, 1, 0, 0, 0, 990, 991, 3, 100, 42, 0, 991, 992, 1, 0, 0, 0, 992, 993, 6, 97, 21, 0, 993, 211, 1, 0, 0, 0, 994, 995, 3, 58, 21, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 98, 11, 0, 997, 213, 1, 0, 0, 0, 998, 999, 3, 60, 22, 0, 999, 1000, 1, 0, 0, 0, 1000, 1001, 6, 99, 11, 0, 1001, 215, 1, 0, 0, 0, 1002, 1003, 3, 62, 23, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 100, 11, 0, 1005, 217, 1, 0, 0, 0, 1006, 1007, 3, 78, 31, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 101, 14, 0, 1009, 1010, 6, 101, 15, 0, 1010, 219, 1, 0, 0, 0, 1011, 1012, 3, 120, 52, 0, 1012, 1013, 1, 0, 0, 0, 1013, 1014, 6, 102, 22, 0, 1014, 221, 1, 0, 0, 0, 1015, 1016, 3, 116, 50, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 103, 18, 0, 1018, 223, 1, 0, 0, 0, 1019, 1024, 3, 82, 33, 0, 1020, 1024, 3, 80, 32, 0, 1021, 1024, 3, 96, 40, 0, 1022, 1024, 3, 170, 77, 0, 1023, 1019, 1, 0, 0, 0, 1023, 1020, 1, 0, 0, 0, 1023, 1021, 1, 0, 0, 0, 1023, 1022, 1, 0, 0, 0, 1024, 225, 1, 0, 0, 0, 1025, 1028, 3, 82, 33, 0, 1026, 1028, 3, 170, 77, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1026, 1, 0, 0, 0, 1028, 1032, 1, 0, 0, 0, 1029, 1031, 3, 224, 104, 0, 1030, 1029, 1, 0, 0, 0, 1031, 1034, 1, 0, 0, 0, 1032, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1045, 1, 0, 0, 0, 1034, 1032, 1, 0, 0, 0, 1035, 1038, 3, 96, 40, 0, 1036, 1038, 3, 90, 37, 0, 1037, 1035, 1, 0, 0, 0, 1037, 1036, 1, 0, 0, 0, 1038, 1040, 1, 0, 0, 0, 1039, 1041, 3, 224, 104, 0, 1040, 1039, 1, 0, 0, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1040, 1, 0, 0, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1045, 1, 0, 0, 0, 1044, 1027, 1, 0, 0, 0, 1044, 1037, 1, 0, 0, 0, 1045, 227, 1, 0, 0, 0, 1046, 1049, 3, 226, 105, 0, 1047, 1049, 3, 184, 84, 0, 1048, 1046, 1, 0, 0, 0, 1048, 1047, 1, 0, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1048, 1, 0, 0, 0, 1050, 1051, 1, 0, 0, 0, 1051, 229, 1, 0, 0, 0, 1052, 1053, 3, 58, 21, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 107, 11, 0, 1055, 231, 1, 0, 0, 0, 1056, 1057, 3, 60, 22, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 108, 11, 0, 1059, 233, 1, 0, 0, 0, 1060, 1061, 3, 62, 23, 0, 1061, 1062, 1, 0, 0, 0, 
1062, 1063, 6, 109, 11, 0, 1063, 235, 1, 0, 0, 0, 1064, 1065, 3, 78, 31, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 110, 14, 0, 1067, 1068, 6, 110, 15, 0, 1068, 237, 1, 0, 0, 0, 1069, 1070, 3, 112, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 111, 19, 0, 1072, 239, 1, 0, 0, 0, 1073, 1074, 3, 116, 50, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 112, 18, 0, 1076, 241, 1, 0, 0, 0, 1077, 1078, 3, 120, 52, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 113, 22, 0, 1080, 243, 1, 0, 0, 0, 1081, 1082, 5, 97, 0, 0, 1082, 1083, 5, 115, 0, 0, 1083, 245, 1, 0, 0, 0, 1084, 1085, 3, 228, 106, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 115, 23, 0, 1087, 247, 1, 0, 0, 0, 1088, 1089, 3, 58, 21, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 116, 11, 0, 1091, 249, 1, 0, 0, 0, 1092, 1093, 3, 60, 22, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 117, 11, 0, 1095, 251, 1, 0, 0, 0, 1096, 1097, 3, 62, 23, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 118, 11, 0, 1099, 253, 1, 0, 0, 0, 1100, 1101, 3, 78, 31, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 119, 14, 0, 1103, 1104, 6, 119, 15, 0, 1104, 255, 1, 0, 0, 0, 1105, 1106, 3, 178, 81, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 120, 12, 0, 1108, 1109, 6, 120, 24, 0, 1109, 257, 1, 0, 0, 0, 1110, 1111, 5, 111, 0, 0, 1111, 1112, 5, 110, 0, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 121, 25, 0, 1114, 259, 1, 0, 0, 0, 1115, 1116, 5, 119, 0, 0, 1116, 1117, 5, 105, 0, 0, 1117, 1118, 5, 116, 0, 0, 1118, 1119, 5, 104, 0, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 122, 25, 0, 1121, 261, 1, 0, 0, 0, 1122, 1123, 8, 12, 0, 0, 1123, 263, 1, 0, 0, 0, 1124, 1126, 3, 262, 123, 0, 1125, 1124, 1, 0, 0, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1125, 1, 0, 0, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 3, 364, 174, 0, 1130, 1132, 1, 0, 0, 0, 1131, 1125, 1, 0, 0, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1134, 1, 0, 0, 0, 1133, 1135, 3, 262, 123, 0, 1134, 1133, 1, 0, 0, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1134, 1, 0, 0, 0, 1136, 1137, 1, 0, 0, 0, 1137, 265, 1, 0, 0, 0, 1138, 1139, 3, 264, 124, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 125, 26, 0, 1141, 267, 1, 0, 0, 0, 1142, 1143, 3, 58, 21, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 126, 11, 0, 1145, 269, 1, 0, 0, 0, 1146, 1147, 3, 60, 22, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 127, 11, 0, 1149, 271, 1, 0, 0, 0, 1150, 1151, 3, 62, 23, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 128, 11, 0, 1153, 273, 1, 0, 0, 0, 1154, 1155, 3, 78, 31, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 129, 14, 0, 1157, 1158, 6, 129, 15, 0, 1158, 1159, 6, 129, 15, 0, 1159, 275, 1, 0, 0, 0, 1160, 1161, 3, 112, 48, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 6, 130, 19, 0, 1163, 277, 1, 0, 0, 0, 1164, 1165, 3, 116, 50, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 131, 18, 0, 1167, 279, 1, 0, 0, 0, 1168, 1169, 3, 120, 52, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 132, 22, 0, 1171, 281, 1, 0, 0, 0, 1172, 1173, 3, 260, 122, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 133, 27, 0, 1175, 283, 1, 0, 0, 0, 1176, 1177, 3, 228, 106, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 134, 23, 0, 1179, 285, 1, 0, 0, 0, 1180, 1181, 3, 186, 85, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 135, 28, 0, 1183, 287, 1, 0, 0, 0, 1184, 1185, 3, 58, 21, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 136, 11, 0, 1187, 289, 1, 0, 0, 0, 1188, 1189, 3, 60, 22, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 137, 11, 0, 1191, 291, 1, 0, 0, 0, 1192, 1193, 3, 62, 23, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 138, 11, 0, 1195, 293, 1, 0, 0, 0, 1196, 1197, 3, 78, 31, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 139, 14, 0, 1199, 1200, 6, 139, 15, 0, 
1200, 295, 1, 0, 0, 0, 1201, 1202, 3, 364, 174, 0, 1202, 1203, 1, 0, 0, 0, 1203, 1204, 6, 140, 17, 0, 1204, 297, 1, 0, 0, 0, 1205, 1206, 3, 116, 50, 0, 1206, 1207, 1, 0, 0, 0, 1207, 1208, 6, 141, 18, 0, 1208, 299, 1, 0, 0, 0, 1209, 1210, 3, 120, 52, 0, 1210, 1211, 1, 0, 0, 0, 1211, 1212, 6, 142, 22, 0, 1212, 301, 1, 0, 0, 0, 1213, 1214, 3, 258, 121, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 143, 29, 0, 1216, 1217, 6, 143, 30, 0, 1217, 303, 1, 0, 0, 0, 1218, 1219, 3, 66, 25, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 144, 20, 0, 1221, 305, 1, 0, 0, 0, 1222, 1223, 3, 100, 42, 0, 1223, 1224, 1, 0, 0, 0, 1224, 1225, 6, 145, 21, 0, 1225, 307, 1, 0, 0, 0, 1226, 1227, 3, 58, 21, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 146, 11, 0, 1229, 309, 1, 0, 0, 0, 1230, 1231, 3, 60, 22, 0, 1231, 1232, 1, 0, 0, 0, 1232, 1233, 6, 147, 11, 0, 1233, 311, 1, 0, 0, 0, 1234, 1235, 3, 62, 23, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 6, 148, 11, 0, 1237, 313, 1, 0, 0, 0, 1238, 1239, 3, 78, 31, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1241, 6, 149, 14, 0, 1241, 1242, 6, 149, 15, 0, 1242, 1243, 6, 149, 15, 0, 1243, 315, 1, 0, 0, 0, 1244, 1245, 3, 116, 50, 0, 1245, 1246, 1, 0, 0, 0, 1246, 1247, 6, 150, 18, 0, 1247, 317, 1, 0, 0, 0, 1248, 1249, 3, 120, 52, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 151, 22, 0, 1251, 319, 1, 0, 0, 0, 1252, 1253, 3, 228, 106, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1255, 6, 152, 23, 0, 1255, 321, 1, 0, 0, 0, 1256, 1257, 3, 58, 21, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1259, 6, 153, 11, 0, 1259, 323, 1, 0, 0, 0, 1260, 1261, 3, 60, 22, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 154, 11, 0, 1263, 325, 1, 0, 0, 0, 1264, 1265, 3, 62, 23, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 155, 11, 0, 1267, 327, 1, 0, 0, 0, 1268, 1269, 3, 78, 31, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 156, 14, 0, 1271, 1272, 6, 156, 15, 0, 1272, 329, 1, 0, 0, 0, 1273, 1274, 3, 120, 52, 0, 1274, 1275, 1, 0, 0, 0, 1275, 1276, 6, 157, 22, 0, 1276, 331, 1, 0, 0, 0, 1277, 1278, 3, 186, 85, 0, 1278, 1279, 1, 0, 0, 0, 1279, 1280, 6, 158, 28, 0, 1280, 333, 1, 0, 0, 0, 1281, 1282, 3, 182, 83, 0, 1282, 1283, 1, 0, 0, 0, 1283, 1284, 6, 159, 31, 0, 1284, 335, 1, 0, 0, 0, 1285, 1286, 3, 58, 21, 0, 1286, 1287, 1, 0, 0, 0, 1287, 1288, 6, 160, 11, 0, 1288, 337, 1, 0, 0, 0, 1289, 1290, 3, 60, 22, 0, 1290, 1291, 1, 0, 0, 0, 1291, 1292, 6, 161, 11, 0, 1292, 339, 1, 0, 0, 0, 1293, 1294, 3, 62, 23, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 162, 11, 0, 1296, 341, 1, 0, 0, 0, 1297, 1298, 3, 78, 31, 0, 1298, 1299, 1, 0, 0, 0, 1299, 1300, 6, 163, 14, 0, 1300, 1301, 6, 163, 15, 0, 1301, 343, 1, 0, 0, 0, 1302, 1303, 5, 105, 0, 0, 1303, 1304, 5, 110, 0, 0, 1304, 1305, 5, 102, 0, 0, 1305, 1306, 5, 111, 0, 0, 1306, 345, 1, 0, 0, 0, 1307, 1308, 3, 58, 21, 0, 1308, 1309, 1, 0, 0, 0, 1309, 1310, 6, 165, 11, 0, 1310, 347, 1, 0, 0, 0, 1311, 1312, 3, 60, 22, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 166, 11, 0, 1314, 349, 1, 0, 0, 0, 1315, 1316, 3, 62, 23, 0, 1316, 1317, 1, 0, 0, 0, 1317, 1318, 6, 167, 11, 0, 1318, 351, 1, 0, 0, 0, 1319, 1320, 3, 78, 31, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 168, 14, 0, 1322, 1323, 6, 168, 15, 0, 1323, 353, 1, 0, 0, 0, 1324, 1325, 5, 102, 0, 0, 1325, 1326, 5, 117, 0, 0, 1326, 1327, 5, 110, 0, 0, 1327, 1328, 5, 99, 0, 0, 1328, 1329, 5, 116, 0, 0, 1329, 1330, 5, 105, 0, 0, 1330, 1331, 5, 111, 0, 0, 1331, 1332, 5, 110, 0, 0, 1332, 1333, 5, 115, 0, 0, 1333, 355, 1, 0, 0, 0, 1334, 1335, 3, 58, 21, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1337, 6, 170, 11, 0, 1337, 357, 1, 0, 0, 0, 1338, 1339, 3, 60, 22, 0, 1339, 1340, 1, 0, 0, 0, 1340, 1341, 6, 171, 
11, 0, 1341, 359, 1, 0, 0, 0, 1342, 1343, 3, 62, 23, 0, 1343, 1344, 1, 0, 0, 0, 1344, 1345, 6, 172, 11, 0, 1345, 361, 1, 0, 0, 0, 1346, 1347, 3, 180, 82, 0, 1347, 1348, 1, 0, 0, 0, 1348, 1349, 6, 173, 16, 0, 1349, 1350, 6, 173, 15, 0, 1350, 363, 1, 0, 0, 0, 1351, 1352, 5, 58, 0, 0, 1352, 365, 1, 0, 0, 0, 1353, 1359, 3, 90, 37, 0, 1354, 1359, 3, 80, 32, 0, 1355, 1359, 3, 120, 52, 0, 1356, 1359, 3, 82, 33, 0, 1357, 1359, 3, 96, 40, 0, 1358, 1353, 1, 0, 0, 0, 1358, 1354, 1, 0, 0, 0, 1358, 1355, 1, 0, 0, 0, 1358, 1356, 1, 0, 0, 0, 1358, 1357, 1, 0, 0, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1358, 1, 0, 0, 0, 1360, 1361, 1, 0, 0, 0, 1361, 367, 1, 0, 0, 0, 1362, 1363, 3, 58, 21, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 176, 11, 0, 1365, 369, 1, 0, 0, 0, 1366, 1367, 3, 60, 22, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 177, 11, 0, 1369, 371, 1, 0, 0, 0, 1370, 1371, 3, 62, 23, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 178, 11, 0, 1373, 373, 1, 0, 0, 0, 1374, 1375, 3, 78, 31, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 179, 14, 0, 1377, 1378, 6, 179, 15, 0, 1378, 375, 1, 0, 0, 0, 1379, 1380, 3, 66, 25, 0, 1380, 1381, 1, 0, 0, 0, 1381, 1382, 6, 180, 20, 0, 1382, 1383, 6, 180, 15, 0, 1383, 1384, 6, 180, 32, 0, 1384, 377, 1, 0, 0, 0, 1385, 1386, 3, 100, 42, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 181, 21, 0, 1388, 1389, 6, 181, 15, 0, 1389, 1390, 6, 181, 32, 0, 1390, 379, 1, 0, 0, 0, 1391, 1392, 3, 58, 21, 0, 1392, 1393, 1, 0, 0, 0, 1393, 1394, 6, 182, 11, 0, 1394, 381, 1, 0, 0, 0, 1395, 1396, 3, 60, 22, 0, 1396, 1397, 1, 0, 0, 0, 1397, 1398, 6, 183, 11, 0, 1398, 383, 1, 0, 0, 0, 1399, 1400, 3, 62, 23, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1402, 6, 184, 11, 0, 1402, 385, 1, 0, 0, 0, 1403, 1404, 3, 364, 174, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 185, 17, 0, 1406, 1407, 6, 185, 15, 0, 1407, 1408, 6, 185, 7, 0, 1408, 387, 1, 0, 0, 0, 1409, 1410, 3, 116, 50, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1412, 6, 186, 18, 0, 1412, 1413, 6, 186, 15, 0, 1413, 1414, 6, 186, 7, 0, 1414, 389, 1, 0, 0, 0, 1415, 1416, 3, 58, 21, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 187, 11, 0, 1418, 391, 1, 0, 0, 0, 1419, 1420, 3, 60, 22, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 188, 11, 0, 1422, 393, 1, 0, 0, 0, 1423, 1424, 3, 62, 23, 0, 1424, 1425, 1, 0, 0, 0, 1425, 1426, 6, 189, 11, 0, 1426, 395, 1, 0, 0, 0, 1427, 1428, 3, 186, 85, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 190, 15, 0, 1430, 1431, 6, 190, 0, 0, 1431, 1432, 6, 190, 28, 0, 1432, 397, 1, 0, 0, 0, 1433, 1434, 3, 182, 83, 0, 1434, 1435, 1, 0, 0, 0, 1435, 1436, 6, 191, 15, 0, 1436, 1437, 6, 191, 0, 0, 1437, 1438, 6, 191, 31, 0, 1438, 399, 1, 0, 0, 0, 1439, 1440, 3, 106, 45, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 192, 15, 0, 1442, 1443, 6, 192, 0, 0, 1443, 1444, 6, 192, 33, 0, 1444, 401, 1, 0, 0, 0, 1445, 1446, 3, 78, 31, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 193, 14, 0, 1448, 1449, 6, 193, 15, 0, 1449, 403, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 576, 586, 590, 593, 602, 604, 615, 622, 627, 666, 671, 680, 687, 692, 694, 705, 713, 716, 718, 723, 728, 734, 741, 746, 752, 755, 763, 767, 891, 898, 900, 916, 921, 926, 928, 934, 1023, 1027, 1032, 1037, 1042, 1044, 1048, 1050, 1127, 1131, 1136, 1358, 1360, 34, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 12, 0, 5, 14, 0, 5, 10, 0, 5, 5, 0, 5, 11, 0, 0, 1, 0, 7, 69, 0, 5, 0, 0, 7, 29, 0, 4, 0, 0, 7, 70, 0, 7, 114, 0, 7, 38, 0, 7, 36, 0, 7, 25, 0, 7, 30, 0, 7, 40, 0, 7, 80, 0, 5, 13, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 72, 0, 7, 88, 0, 5, 9, 0, 7, 71, 0, 5, 15, 0, 7, 33, 0] \ No newline at end 
of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
index 1511be73d40e1..de837d1764791 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
@@ -20,29 +20,28 @@ public class EsqlBaseLexer extends Lexer {
     DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8,
     KEEP=9, LIMIT=10, LOOKUP=11, META=12, METRICS=13, MV_EXPAND=14, RENAME=15,
     ROW=16, SHOW=17, SORT=18, STATS=19, WHERE=20, UNKNOWN_CMD=21, LINE_COMMENT=22,
-    MULTILINE_COMMENT=23, WS=24, INDEX_UNQUOTED_IDENTIFIER=25, EXPLAIN_WS=26,
-    EXPLAIN_LINE_COMMENT=27, EXPLAIN_MULTILINE_COMMENT=28, PIPE=29, QUOTED_STRING=30,
-    INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36,
-    CAST_OP=37, COMMA=38, DESC=39, DOT=40, FALSE=41, FIRST=42, LAST=43, LP=44,
-    IN=45, IS=46, LIKE=47, NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, RLIKE=53,
-    RP=54, TRUE=55, EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, GTE=62,
-    PLUS=63, MINUS=64, ASTERISK=65, SLASH=66, PERCENT=67, NAMED_OR_POSITIONAL_PARAM=68,
-    OPENING_BRACKET=69, CLOSING_BRACKET=70, UNQUOTED_IDENTIFIER=71, QUOTED_IDENTIFIER=72,
-    EXPR_LINE_COMMENT=73, EXPR_MULTILINE_COMMENT=74, EXPR_WS=75, METADATA=76,
-    FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80,
-    PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83,
-    AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87,
-    ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92,
-    ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95,
-    ENRICH_FIELD_WS=96, LOOKUP_LINE_COMMENT=97, LOOKUP_MULTILINE_COMMENT=98,
-    LOOKUP_WS=99, LOOKUP_FIELD_LINE_COMMENT=100, LOOKUP_FIELD_MULTILINE_COMMENT=101,
-    LOOKUP_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103, MVEXPAND_MULTILINE_COMMENT=104,
-    MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107, SHOW_MULTILINE_COMMENT=108,
-    SHOW_WS=109, FUNCTIONS=110, META_LINE_COMMENT=111, META_MULTILINE_COMMENT=112,
-    META_WS=113, COLON=114, SETTING=115, SETTING_LINE_COMMENT=116, SETTTING_MULTILINE_COMMENT=117,
-    SETTING_WS=118, METRICS_LINE_COMMENT=119, METRICS_MULTILINE_COMMENT=120,
-    METRICS_WS=121, CLOSING_METRICS_LINE_COMMENT=122, CLOSING_METRICS_MULTILINE_COMMENT=123,
-    CLOSING_METRICS_WS=124;
+    MULTILINE_COMMENT=23, WS=24, UNQUOTED_SOURCE=25, EXPLAIN_WS=26, EXPLAIN_LINE_COMMENT=27,
+    EXPLAIN_MULTILINE_COMMENT=28, PIPE=29, QUOTED_STRING=30, INTEGER_LITERAL=31,
+    DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, CAST_OP=37, COMMA=38,
+    DESC=39, DOT=40, FALSE=41, FIRST=42, LAST=43, LP=44, IN=45, IS=46, LIKE=47,
+    NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, RLIKE=53, RP=54, TRUE=55,
+    EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, GTE=62, PLUS=63, MINUS=64,
+    ASTERISK=65, SLASH=66, PERCENT=67, NAMED_OR_POSITIONAL_PARAM=68, OPENING_BRACKET=69,
+    CLOSING_BRACKET=70, UNQUOTED_IDENTIFIER=71, QUOTED_IDENTIFIER=72, EXPR_LINE_COMMENT=73,
+    EXPR_MULTILINE_COMMENT=74, EXPR_WS=75, METADATA=76, FROM_LINE_COMMENT=77,
+    FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, PROJECT_LINE_COMMENT=81,
+    PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, AS=84, RENAME_LINE_COMMENT=85,
+    RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, ON=88, WITH=89, ENRICH_POLICY_NAME=90,
+    ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94,
+    ENRICH_FIELD_MULTILINE_COMMENT=95, ENRICH_FIELD_WS=96, LOOKUP_LINE_COMMENT=97,
+    LOOKUP_MULTILINE_COMMENT=98, LOOKUP_WS=99, LOOKUP_FIELD_LINE_COMMENT=100,
+    LOOKUP_FIELD_MULTILINE_COMMENT=101, LOOKUP_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103,
+    MVEXPAND_MULTILINE_COMMENT=104, MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107,
+    SHOW_MULTILINE_COMMENT=108, SHOW_WS=109, FUNCTIONS=110, META_LINE_COMMENT=111,
+    META_MULTILINE_COMMENT=112, META_WS=113, COLON=114, SETTING=115, SETTING_LINE_COMMENT=116,
+    SETTTING_MULTILINE_COMMENT=117, SETTING_WS=118, METRICS_LINE_COMMENT=119,
+    METRICS_MULTILINE_COMMENT=120, METRICS_WS=121, CLOSING_METRICS_LINE_COMMENT=122,
+    CLOSING_METRICS_MULTILINE_COMMENT=123, CLOSING_METRICS_WS=124;
   public static final int
     EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5,
     ENRICH_MODE=6, ENRICH_FIELD_MODE=7, LOOKUP_MODE=8, LOOKUP_FIELD_MODE=9,
@@ -64,7 +63,7 @@ private static String[] makeRuleNames() {
     "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS",
     "KEEP", "LIMIT", "LOOKUP", "META", "METRICS", "MV_EXPAND", "RENAME",
     "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT",
-    "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER_PART", "INDEX_UNQUOTED_IDENTIFIER",
+    "MULTILINE_COMMENT", "WS", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE",
     "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT",
     "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE",
     "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK",
@@ -76,19 +75,19 @@ private static String[] makeRuleNames() {
     "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET",
     "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT",
     "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET",
-    "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING",
-    "METADATA", "FROM_INDEX_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT",
+    "FROM_CLOSING_BRACKET", "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA",
+    "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT",
     "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN",
     "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT",
     "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT",
     "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT",
     "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY",
-    "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE",
-    "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE",
-    "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH",
-    "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT",
-    "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "LOOKUP_PIPE", "LOOKUP_COMMA",
-    "LOOKUP_DOT", "LOOKUP_ON", "LOOKUP_INDEX_UNQUOTED_IDENTIFIER", "LOOKUP_LINE_COMMENT",
+    "ENRICH_POLICY_NAME", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT",
+    "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN",
+    "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN",
+    "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT",
+    "ENRICH_FIELD_WS", "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT",
+    "LOOKUP_ON", "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT",
     "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA",
     "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT",
     "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "MVEXPAND_PIPE",
@@ -97,11 +96,11 @@ private static String[] makeRuleNames() {
     "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS",
     "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT",
     "META_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT",
-    "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_PIPE", "METRICS_INDEX_UNQUOTED_IDENTIFIER",
-    "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COMMA",
-    "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT",
-    "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER",
-    "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE"
+    "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_PIPE", "METRICS_UNQUOTED_SOURCE",
+    "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT",
+    "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", "CLOSING_METRICS_LINE_COMMENT",
+    "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER",
+    "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE"
     };
   }
   public static final String[] ruleNames = makeRuleNames();
@@ -129,11 +128,11 @@ private static String[] makeSymbolicNames() {
     null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK",
     "INLINESTATS", "KEEP", "LIMIT", "LOOKUP", "META", "METRICS", "MV_EXPAND",
     "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT",
-    "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS",
-    "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING",
-    "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP",
-    "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE",
-    "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ",
+    "MULTILINE_COMMENT", "WS", "UNQUOTED_SOURCE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT",
+    "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL",
+    "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA",
+    "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT",
+    "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ",
     "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH",
     "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET",
     "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT",
@@ -213,7 +212,7 @@ public EsqlBaseLexer(CharStream input) {
   public ATN getATN() { return _ATN; }

   public static final String _serializedATN =
-    "\u0004\u0000|\u058e\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
+    "\u0004\u0000|\u05aa\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
     "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
     "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
     "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
@@ -267,857 +266,876 @@ public EsqlBaseLexer(CharStream input) {
"\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007\u00b6"+ "\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007\u00b9"+ "\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc"+ - "\u0002\u00bd\u0007\u00bd\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf"+ + "\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0001\u0000\u0001\u0000"+ "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e"+ "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - 
"\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ + "\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0014\u0004\u0014\u0237\b\u0014\u000b\u0014\f\u0014\u0238\u0001"+ - "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005"+ - "\u0015\u0241\b\u0015\n\u0015\f\u0015\u0244\t\u0015\u0001\u0015\u0003\u0015"+ - "\u0247\b\u0015\u0001\u0015\u0003\u0015\u024a\b\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005"+ - "\u0016\u0253\b\u0016\n\u0016\f\u0016\u0256\t\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0004\u0017\u025e\b\u0017"+ - "\u000b\u0017\f\u0017\u025f\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0003\u0018\u0267\b\u0018\u0001\u0019\u0004\u0019\u026a\b"+ - "\u0019\u000b\u0019\f\u0019\u026b\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ - "\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001"+ - "!\u0001!\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0003$\u0293"+ - "\b$\u0001$\u0004$\u0296\b$\u000b$\f$\u0297\u0001%\u0001%\u0001&\u0001"+ - "&\u0001\'\u0001\'\u0001\'\u0003\'\u02a1\b\'\u0001(\u0001(\u0001)\u0001"+ - ")\u0001)\u0003)\u02a8\b)\u0001*\u0001*\u0001*\u0005*\u02ad\b*\n*\f*\u02b0"+ - "\t*\u0001*\u0001*\u0001*\u0001*\u0001*\u0001*\u0005*\u02b8\b*\n*\f*\u02bb"+ - "\t*\u0001*\u0001*\u0001*\u0001*\u0001*\u0003*\u02c2\b*\u0001*\u0003*\u02c5"+ - "\b*\u0003*\u02c7\b*\u0001+\u0004+\u02ca\b+\u000b+\f+\u02cb\u0001,\u0004"+ - ",\u02cf\b,\u000b,\f,\u02d0\u0001,\u0001,\u0005,\u02d5\b,\n,\f,\u02d8\t"+ - ",\u0001,\u0001,\u0004,\u02dc\b,\u000b,\f,\u02dd\u0001,\u0004,\u02e1\b"+ - ",\u000b,\f,\u02e2\u0001,\u0001,\u0005,\u02e7\b,\n,\f,\u02ea\t,\u0003,"+ - "\u02ec\b,\u0001,\u0001,\u0001,\u0001,\u0004,\u02f2\b,\u000b,\f,\u02f3"+ - "\u0001,\u0001,\u0003,\u02f8\b,\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ - ".\u0001.\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ - "1\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u0001"+ - "5\u00015\u00015\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u0001"+ - "6\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u00019\u0001"+ - "9\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001"+ - "<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001"+ - ">\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001"+ - "A\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001"+ - "C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001E\u0001F\u0001"+ - "F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ - "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001"+ - "O\u0001O\u0001P\u0001P\u0001P\u0005P\u0372\bP\nP\fP\u0375\tP\u0001P\u0001"+ - "P\u0004P\u0379\bP\u000bP\fP\u037a\u0003P\u037d\bP\u0001Q\u0001Q\u0001"+ - 
"Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0005"+ - "S\u038b\bS\nS\fS\u038e\tS\u0001S\u0001S\u0003S\u0392\bS\u0001S\u0004S"+ - "\u0395\bS\u000bS\fS\u0396\u0003S\u0399\bS\u0001T\u0001T\u0004T\u039d\b"+ - "T\u000bT\fT\u039e\u0001T\u0001T\u0001U\u0001U\u0001V\u0001V\u0001V\u0001"+ - "V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001"+ - "Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001"+ - "[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001"+ - "^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001_\u0001_\u0001"+ - "_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001"+ - "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ - "d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001"+ - "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0003g\u03f4\bg\u0001h\u0001h\u0003"+ - "h\u03f8\bh\u0001h\u0005h\u03fb\bh\nh\fh\u03fe\th\u0001h\u0001h\u0003h"+ - "\u0402\bh\u0001h\u0004h\u0405\bh\u000bh\fh\u0406\u0003h\u0409\bh\u0001"+ - "i\u0001i\u0004i\u040d\bi\u000bi\fi\u040e\u0001j\u0001j\u0001j\u0001j\u0001"+ - "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ - "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ - "o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ - "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ - "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001"+ - "w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001"+ - "y\u0001y\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001{\u0004{\u045a"+ - "\b{\u000b{\f{\u045b\u0001{\u0001{\u0003{\u0460\b{\u0001{\u0004{\u0463"+ - "\b{\u000b{\f{\u0464\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001"+ - "}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001"+ - "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001"+ - "\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ - "\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001"+ - "\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ - "\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001"+ - "\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ - "\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001"+ - "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001"+ - "\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ - "\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001"+ - "\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001"+ - "\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001"+ - "\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ - "\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ - "\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001"+ - "\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ - "\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001"+ - "\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ - "\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001"+ - "\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001"+ - 
"\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ - "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ - "\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001"+ - "\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001"+ - "\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001"+ - "\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001"+ - "\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ - "\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ad\u0001\u00ad\u0004\u00ad\u053f\b\u00ad\u000b\u00ad\f"+ - "\u00ad\u0540\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af"+ - "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ - "\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1"+ - "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ - "\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4"+ - "\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ - "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6"+ - "\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8"+ - "\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9"+ - "\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ - "\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb"+ - "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc"+ - "\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0002\u0254"+ - "\u02b9\u0000\u00be\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004\u0018"+ - "\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f"+ - ".\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017>\u0018@\u0000"+ - "B\u0019D\u0000F\u0000H\u001aJ\u001bL\u001cN\u001dP\u0000R\u0000T\u0000"+ - "V\u0000X\u0000Z\u0000\\\u0000^\u0000`\u0000b\u0000d\u001ef\u001fh j!l"+ - "\"n#p$r%t&v\'x(z)|*~+\u0080,\u0082-\u0084.\u0086/\u00880\u008a1\u008c"+ - "2\u008e3\u00904\u00925\u00946\u00967\u00988\u009a9\u009c:\u009e;\u00a0"+ - "<\u00a2=\u00a4>\u00a6?\u00a8@\u00aaA\u00acB\u00aeC\u00b0D\u00b2E\u00b4"+ - "F\u00b6G\u00b8\u0000\u00baH\u00bcI\u00beJ\u00c0K\u00c2\u0000\u00c4\u0000"+ - "\u00c6\u0000\u00c8\u0000\u00ca\u0000\u00cc\u0000\u00ceL\u00d0\u0000\u00d2"+ - "M\u00d4N\u00d6O\u00d8\u0000\u00da\u0000\u00dc\u0000\u00de\u0000\u00e0"+ - "\u0000\u00e2P\u00e4Q\u00e6R\u00e8S\u00ea\u0000\u00ec\u0000\u00ee\u0000"+ - "\u00f0\u0000\u00f2T\u00f4\u0000\u00f6U\u00f8V\u00faW\u00fc\u0000\u00fe"+ - "\u0000\u0100X\u0102Y\u0104\u0000\u0106Z\u0108\u0000\u010a\u0000\u010c"+ - "[\u010e\\\u0110]\u0112\u0000\u0114\u0000\u0116\u0000\u0118\u0000\u011a"+ - "\u0000\u011c\u0000\u011e\u0000\u0120^\u0122_\u0124`\u0126\u0000\u0128"+ - "\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130a\u0132b\u0134c\u0136"+ - "\u0000\u0138\u0000\u013a\u0000\u013c\u0000\u013ed\u0140e\u0142f\u0144"+ - "\u0000\u0146\u0000\u0148\u0000\u014a\u0000\u014cg\u014eh\u0150i\u0152"+ - "\u0000\u0154j\u0156k\u0158l\u015am\u015c\u0000\u015en\u0160o\u0162p\u0164"+ - 
"q\u0166\u0000\u0168r\u016as\u016ct\u016eu\u0170v\u0172\u0000\u0174\u0000"+ - "\u0176w\u0178x\u017ay\u017c\u0000\u017ez\u0180{\u0182|\u0184\u0000\u0186"+ - "\u0000\u0188\u0000\u018a\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005"+ + "\u0001\u0013\u0001\u0013\u0001\u0014\u0004\u0014\u023f\b\u0014\u000b\u0014"+ + "\f\u0014\u0240\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0005\u0015\u0249\b\u0015\n\u0015\f\u0015\u024c\t\u0015\u0001"+ + "\u0015\u0003\u0015\u024f\b\u0015\u0001\u0015\u0003\u0015\u0252\b\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0016\u0005\u0016\u025b\b\u0016\n\u0016\f\u0016\u025e\t\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0004"+ + "\u0017\u0266\b\u0017\u000b\u0017\f\u0017\u0267\u0001\u0017\u0001\u0017"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u026f\b\u0018\u0001\u0019"+ + "\u0004\u0019\u0272\b\u0019\u000b\u0019\f\u0019\u0273\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001"+ + "\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ + "\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001#\u0001"+ + "#\u0001$\u0001$\u0003$\u029b\b$\u0001$\u0004$\u029e\b$\u000b$\f$\u029f"+ + "\u0001%\u0001%\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0003\'\u02a9\b\'"+ + "\u0001(\u0001(\u0001)\u0001)\u0001)\u0003)\u02b0\b)\u0001*\u0001*\u0001"+ + "*\u0005*\u02b5\b*\n*\f*\u02b8\t*\u0001*\u0001*\u0001*\u0001*\u0001*\u0001"+ + "*\u0005*\u02c0\b*\n*\f*\u02c3\t*\u0001*\u0001*\u0001*\u0001*\u0001*\u0003"+ + "*\u02ca\b*\u0001*\u0003*\u02cd\b*\u0003*\u02cf\b*\u0001+\u0004+\u02d2"+ + "\b+\u000b+\f+\u02d3\u0001,\u0004,\u02d7\b,\u000b,\f,\u02d8\u0001,\u0001"+ + ",\u0005,\u02dd\b,\n,\f,\u02e0\t,\u0001,\u0001,\u0004,\u02e4\b,\u000b,"+ + "\f,\u02e5\u0001,\u0004,\u02e9\b,\u000b,\f,\u02ea\u0001,\u0001,\u0005,"+ + "\u02ef\b,\n,\f,\u02f2\t,\u0003,\u02f4\b,\u0001,\u0001,\u0001,\u0001,\u0004"+ + ",\u02fa\b,\u000b,\f,\u02fb\u0001,\u0001,\u0003,\u0300\b,\u0001-\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001"+ + "0\u00010\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u0001"+ + "3\u00013\u00014\u00014\u00015\u00015\u00015\u00015\u00015\u00015\u0001"+ + "6\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ + "7\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001"+ + ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001"+ + "=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001"+ + "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001A\u0001"+ + "B\u0001B\u0001C\u0001C\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001"+ + "E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001"+ + "H\u0001I\u0001I\u0001J\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ + "M\u0001M\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0005P\u037a"+ + "\bP\nP\fP\u037d\tP\u0001P\u0001P\u0004P\u0381\bP\u000bP\fP\u0382\u0003"+ + "P\u0385\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ + "R\u0001R\u0001S\u0001S\u0005S\u0393\bS\nS\fS\u0396\tS\u0001S\u0001S\u0003"+ + "S\u039a\bS\u0001S\u0004S\u039d\bS\u000bS\fS\u039e\u0003S\u03a1\bS\u0001"+ + "T\u0001T\u0004T\u03a5\bT\u000bT\fT\u03a6\u0001T\u0001T\u0001U\u0001U\u0001"+ + 
"V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ + "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001"+ + "Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001"+ + "]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001"+ + "_\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ + "c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001"+ + "e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001"+ + "h\u0001h\u0001h\u0003h\u0400\bh\u0001i\u0001i\u0003i\u0404\bi\u0001i\u0005"+ + "i\u0407\bi\ni\fi\u040a\ti\u0001i\u0001i\u0003i\u040e\bi\u0001i\u0004i"+ + "\u0411\bi\u000bi\fi\u0412\u0003i\u0415\bi\u0001j\u0001j\u0004j\u0419\b"+ + "j\u000bj\fj\u041a\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001"+ + "l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001n\u0001"+ + "o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ + "q\u0001q\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ + "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ + "v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ + "x\u0001y\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ + "z\u0001z\u0001z\u0001{\u0001{\u0001|\u0004|\u0466\b|\u000b|\f|\u0467\u0001"+ + "|\u0001|\u0003|\u046c\b|\u0001|\u0004|\u046f\b|\u000b|\f|\u0470\u0001"+ + "}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f"+ + "\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081"+ + "\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083"+ + "\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084"+ + "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086"+ + "\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087"+ + "\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089"+ + "\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a"+ + "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c"+ + "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d"+ + "\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f"+ + "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ + "\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091"+ + "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093"+ + "\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097"+ + "\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098"+ + "\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a"+ + "\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b"+ + "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d"+ + "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e"+ + "\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0"+ + "\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1"+ + 
"\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3"+ + "\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4"+ + "\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5"+ + "\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7"+ + "\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae"+ + "\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00af"+ + "\u0004\u00af\u054f\b\u00af\u000b\u00af\f\u00af\u0550\u0001\u00b0\u0001"+ + "\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ + "\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001"+ + "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001"+ + "\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001"+ + "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ + "\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ + "\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9\u0001"+ + "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ + "\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001"+ + "\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001"+ + "\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001"+ + "\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001\u00bf\u0001"+ + "\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001"+ + "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001"+ + "\u00c1\u0001\u00c1\u0001\u00c1\u0002\u025c\u02c1\u0000\u00c2\u0010\u0001"+ + "\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c\u0007"+ + "\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u00124\u0013"+ + "6\u00148\u0015:\u0016<\u0017>\u0018@\u0000B\u0019D\u0000F\u0000H\u001a"+ + "J\u001bL\u001cN\u001dP\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000"+ + "^\u0000`\u0000b\u0000d\u001ef\u001fh j!l\"n#p$r%t&v\'x(z)|*~+\u0080,\u0082"+ + "-\u0084.\u0086/\u00880\u008a1\u008c2\u008e3\u00904\u00925\u00946\u0096"+ + "7\u00988\u009a9\u009c:\u009e;\u00a0<\u00a2=\u00a4>\u00a6?\u00a8@\u00aa"+ + "A\u00acB\u00aeC\u00b0D\u00b2E\u00b4F\u00b6G\u00b8\u0000\u00baH\u00bcI"+ + "\u00beJ\u00c0K\u00c2\u0000\u00c4\u0000\u00c6\u0000\u00c8\u0000\u00ca\u0000"+ + "\u00cc\u0000\u00ceL\u00d0\u0000\u00d2\u0000\u00d4M\u00d6N\u00d8O\u00da"+ + "\u0000\u00dc\u0000\u00de\u0000\u00e0\u0000\u00e2\u0000\u00e4P\u00e6Q\u00e8"+ + "R\u00eaS\u00ec\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4T\u00f6"+ + "\u0000\u00f8U\u00faV\u00fcW\u00fe\u0000\u0100\u0000\u0102X\u0104Y\u0106"+ + "\u0000\u0108Z\u010a\u0000\u010c[\u010e\\\u0110]\u0112\u0000\u0114\u0000"+ + "\u0116\u0000\u0118\u0000\u011a\u0000\u011c\u0000\u011e\u0000\u0120^\u0122"+ + "_\u0124`\u0126\u0000\u0128\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130"+ + "\u0000\u0132\u0000\u0134a\u0136b\u0138c\u013a\u0000\u013c\u0000\u013e"+ + "\u0000\u0140\u0000\u0142d\u0144e\u0146f\u0148\u0000\u014a\u0000\u014c"+ + 
"\u0000\u014e\u0000\u0150g\u0152h\u0154i\u0156\u0000\u0158j\u015ak\u015c"+ + "l\u015em\u0160\u0000\u0162n\u0164o\u0166p\u0168q\u016a\u0000\u016cr\u016e"+ + "s\u0170t\u0172u\u0174v\u0176\u0000\u0178\u0000\u017a\u0000\u017cw\u017e"+ + "x\u0180y\u0182\u0000\u0184\u0000\u0186z\u0188{\u018a|\u018c\u0000\u018e"+ + "\u0000\u0190\u0000\u0192\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005"+ "\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\r\u0006\u0000\t\n\r\r //[[]"+ - "]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \n\u0000\t\n\r\r ,,//==[["+ - "]]``||\u0002\u0000**//\u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\"+ - "\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001"+ - "\u0000``\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u05a8\u0000\u0010\u0001"+ - "\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001"+ - "\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001"+ - "\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001"+ - "\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000"+ - "\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000"+ - "\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000"+ - "*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001"+ - "\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000"+ - "\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u0000"+ - "8\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001"+ - "\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000"+ - "\u0000\u0001D\u0001\u0000\u0000\u0000\u0001F\u0001\u0000\u0000\u0000\u0001"+ - "H\u0001\u0000\u0000\u0000\u0001J\u0001\u0000\u0000\u0000\u0001L\u0001"+ - "\u0000\u0000\u0000\u0002N\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000"+ - "\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002"+ - "j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001"+ - "\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000"+ - "\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002"+ - "x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001"+ - "\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000"+ - "\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000"+ - "\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000"+ - "\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000"+ - "\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000"+ - "\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000"+ - "\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000"+ - "\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000"+ - "\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000"+ - "\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000"+ - "\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000"+ - "\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001\u0000"+ - "\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001\u0000"+ - "\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0002\u00b4\u0001\u0000"+ - "\u0000\u0000\u0002\u00b6\u0001\u0000\u0000\u0000\u0002\u00ba\u0001\u0000"+ - "\u0000\u0000\u0002\u00bc\u0001\u0000\u0000\u0000\u0002\u00be\u0001\u0000"+ - 
"\u0000\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000"+ - "\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001\u0000"+ - "\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000"+ - "\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000"+ - "\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000"+ - "\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000"+ - "\u0000\u0000\u0004\u00d8\u0001\u0000\u0000\u0000\u0004\u00da\u0001\u0000"+ - "\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00e2\u0001\u0000"+ - "\u0000\u0000\u0004\u00e4\u0001\u0000\u0000\u0000\u0004\u00e6\u0001\u0000"+ - "\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea\u0001\u0000"+ - "\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee\u0001\u0000"+ - "\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2\u0001\u0000"+ - "\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005\u00f6\u0001\u0000"+ - "\u0000\u0000\u0005\u00f8\u0001\u0000\u0000\u0000\u0005\u00fa\u0001\u0000"+ - "\u0000\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000"+ - "\u0000\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000"+ - "\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108\u0001\u0000"+ - "\u0000\u0000\u0006\u010a\u0001\u0000\u0000\u0000\u0006\u010c\u0001\u0000"+ - "\u0000\u0000\u0006\u010e\u0001\u0000\u0000\u0000\u0006\u0110\u0001\u0000"+ - "\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001\u0000"+ - "\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118\u0001\u0000"+ - "\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c\u0001\u0000"+ - "\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007\u0120\u0001\u0000"+ - "\u0000\u0000\u0007\u0122\u0001\u0000\u0000\u0000\u0007\u0124\u0001\u0000"+ - "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ - "\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c\u0001\u0000\u0000\u0000"+ - "\b\u012e\u0001\u0000\u0000\u0000\b\u0130\u0001\u0000\u0000\u0000\b\u0132"+ - "\u0001\u0000\u0000\u0000\b\u0134\u0001\u0000\u0000\u0000\t\u0136\u0001"+ - "\u0000\u0000\u0000\t\u0138\u0001\u0000\u0000\u0000\t\u013a\u0001\u0000"+ - "\u0000\u0000\t\u013c\u0001\u0000\u0000\u0000\t\u013e\u0001\u0000\u0000"+ - "\u0000\t\u0140\u0001\u0000\u0000\u0000\t\u0142\u0001\u0000\u0000\u0000"+ - "\n\u0144\u0001\u0000\u0000\u0000\n\u0146\u0001\u0000\u0000\u0000\n\u0148"+ - "\u0001\u0000\u0000\u0000\n\u014a\u0001\u0000\u0000\u0000\n\u014c\u0001"+ - "\u0000\u0000\u0000\n\u014e\u0001\u0000\u0000\u0000\n\u0150\u0001\u0000"+ - "\u0000\u0000\u000b\u0152\u0001\u0000\u0000\u0000\u000b\u0154\u0001\u0000"+ + "]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u000b\u0000\t\n\r\r \"\""+ + ",,//::==[[]]||\u0002\u0000**//\u0001\u000009\u0002\u0000AZaz\u0005\u0000"+ + "\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000"+ + "++--\u0001\u0000``\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u05c4\u0000"+ + "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ + "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ + "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ + "\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000"+ + " \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001"+ + "\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000"+ + 
"\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ + ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ + "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ + "\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000"+ + "<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000B\u0001"+ + "\u0000\u0000\u0000\u0001D\u0001\u0000\u0000\u0000\u0001F\u0001\u0000\u0000"+ + "\u0000\u0001H\u0001\u0000\u0000\u0000\u0001J\u0001\u0000\u0000\u0000\u0001"+ + "L\u0001\u0000\u0000\u0000\u0002N\u0001\u0000\u0000\u0000\u0002d\u0001"+ + "\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ + "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ + "n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ + "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ + "\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ + "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ + "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ + "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ + "\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ + "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ + "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ + "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ + "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ + "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ + "\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ + "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ + "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ + "\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ + "\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0002\u00b4\u0001"+ + "\u0000\u0000\u0000\u0002\u00b6\u0001\u0000\u0000\u0000\u0002\u00ba\u0001"+ + "\u0000\u0000\u0000\u0002\u00bc\u0001\u0000\u0000\u0000\u0002\u00be\u0001"+ + "\u0000\u0000\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ + "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ + "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001"+ + "\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001"+ + "\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001"+ + "\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001"+ + "\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0004\u00da\u0001"+ + "\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de\u0001"+ + "\u0000\u0000\u0000\u0004\u00e4\u0001\u0000\u0000\u0000\u0004\u00e6\u0001"+ + "\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea\u0001"+ + "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee\u0001"+ + "\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2\u0001"+ + "\u0000\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005\u00f6\u0001"+ + "\u0000\u0000\u0000\u0005\u00f8\u0001\u0000\u0000\u0000\u0005\u00fa\u0001"+ + "\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001"+ + "\u0000\u0000\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001"+ + 
"\u0000\u0000\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0108\u0001"+ + "\u0000\u0000\u0000\u0006\u010a\u0001\u0000\u0000\u0000\u0006\u010c\u0001"+ + "\u0000\u0000\u0000\u0006\u010e\u0001\u0000\u0000\u0000\u0006\u0110\u0001"+ + "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ + "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118\u0001"+ + "\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c\u0001"+ + "\u0000\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007\u0120\u0001"+ + "\u0000\u0000\u0000\u0007\u0122\u0001\u0000\u0000\u0000\u0007\u0124\u0001"+ + "\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000"+ + "\u0000\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c\u0001\u0000\u0000"+ + "\u0000\b\u012e\u0001\u0000\u0000\u0000\b\u0130\u0001\u0000\u0000\u0000"+ + "\b\u0132\u0001\u0000\u0000\u0000\b\u0134\u0001\u0000\u0000\u0000\b\u0136"+ + "\u0001\u0000\u0000\u0000\b\u0138\u0001\u0000\u0000\u0000\t\u013a\u0001"+ + "\u0000\u0000\u0000\t\u013c\u0001\u0000\u0000\u0000\t\u013e\u0001\u0000"+ + "\u0000\u0000\t\u0140\u0001\u0000\u0000\u0000\t\u0142\u0001\u0000\u0000"+ + "\u0000\t\u0144\u0001\u0000\u0000\u0000\t\u0146\u0001\u0000\u0000\u0000"+ + "\n\u0148\u0001\u0000\u0000\u0000\n\u014a\u0001\u0000\u0000\u0000\n\u014c"+ + "\u0001\u0000\u0000\u0000\n\u014e\u0001\u0000\u0000\u0000\n\u0150\u0001"+ + "\u0000\u0000\u0000\n\u0152\u0001\u0000\u0000\u0000\n\u0154\u0001\u0000"+ "\u0000\u0000\u000b\u0156\u0001\u0000\u0000\u0000\u000b\u0158\u0001\u0000"+ - "\u0000\u0000\u000b\u015a\u0001\u0000\u0000\u0000\f\u015c\u0001\u0000\u0000"+ - "\u0000\f\u015e\u0001\u0000\u0000\u0000\f\u0160\u0001\u0000\u0000\u0000"+ - "\f\u0162\u0001\u0000\u0000\u0000\f\u0164\u0001\u0000\u0000\u0000\r\u0166"+ - "\u0001\u0000\u0000\u0000\r\u0168\u0001\u0000\u0000\u0000\r\u016a\u0001"+ - "\u0000\u0000\u0000\r\u016c\u0001\u0000\u0000\u0000\r\u016e\u0001\u0000"+ - "\u0000\u0000\r\u0170\u0001\u0000\u0000\u0000\u000e\u0172\u0001\u0000\u0000"+ - "\u0000\u000e\u0174\u0001\u0000\u0000\u0000\u000e\u0176\u0001\u0000\u0000"+ + "\u0000\u0000\u000b\u015a\u0001\u0000\u0000\u0000\u000b\u015c\u0001\u0000"+ + "\u0000\u0000\u000b\u015e\u0001\u0000\u0000\u0000\f\u0160\u0001\u0000\u0000"+ + "\u0000\f\u0162\u0001\u0000\u0000\u0000\f\u0164\u0001\u0000\u0000\u0000"+ + "\f\u0166\u0001\u0000\u0000\u0000\f\u0168\u0001\u0000\u0000\u0000\r\u016a"+ + "\u0001\u0000\u0000\u0000\r\u016c\u0001\u0000\u0000\u0000\r\u016e\u0001"+ + "\u0000\u0000\u0000\r\u0170\u0001\u0000\u0000\u0000\r\u0172\u0001\u0000"+ + "\u0000\u0000\r\u0174\u0001\u0000\u0000\u0000\u000e\u0176\u0001\u0000\u0000"+ "\u0000\u000e\u0178\u0001\u0000\u0000\u0000\u000e\u017a\u0001\u0000\u0000"+ - "\u0000\u000f\u017c\u0001\u0000\u0000\u0000\u000f\u017e\u0001\u0000\u0000"+ - "\u0000\u000f\u0180\u0001\u0000\u0000\u0000\u000f\u0182\u0001\u0000\u0000"+ + "\u0000\u000e\u017c\u0001\u0000\u0000\u0000\u000e\u017e\u0001\u0000\u0000"+ + "\u0000\u000e\u0180\u0001\u0000\u0000\u0000\u000f\u0182\u0001\u0000\u0000"+ "\u0000\u000f\u0184\u0001\u0000\u0000\u0000\u000f\u0186\u0001\u0000\u0000"+ "\u0000\u000f\u0188\u0001\u0000\u0000\u0000\u000f\u018a\u0001\u0000\u0000"+ - "\u0000\u0010\u018c\u0001\u0000\u0000\u0000\u0012\u0196\u0001\u0000\u0000"+ - "\u0000\u0014\u019d\u0001\u0000\u0000\u0000\u0016\u01a6\u0001\u0000\u0000"+ - "\u0000\u0018\u01ad\u0001\u0000\u0000\u0000\u001a\u01b7\u0001\u0000\u0000"+ - "\u0000\u001c\u01be\u0001\u0000\u0000\u0000\u001e\u01c5\u0001\u0000\u0000"+ - "\u0000 \u01d3\u0001\u0000\u0000\u0000\"\u01da\u0001\u0000\u0000\u0000"+ - 
"$\u01e2\u0001\u0000\u0000\u0000&\u01eb\u0001\u0000\u0000\u0000(\u01f2"+ - "\u0001\u0000\u0000\u0000*\u01fc\u0001\u0000\u0000\u0000,\u0208\u0001\u0000"+ - "\u0000\u0000.\u0211\u0001\u0000\u0000\u00000\u0217\u0001\u0000\u0000\u0000"+ - "2\u021e\u0001\u0000\u0000\u00004\u0225\u0001\u0000\u0000\u00006\u022d"+ - "\u0001\u0000\u0000\u00008\u0236\u0001\u0000\u0000\u0000:\u023c\u0001\u0000"+ - "\u0000\u0000<\u024d\u0001\u0000\u0000\u0000>\u025d\u0001\u0000\u0000\u0000"+ - "@\u0266\u0001\u0000\u0000\u0000B\u0269\u0001\u0000\u0000\u0000D\u026d"+ - "\u0001\u0000\u0000\u0000F\u0272\u0001\u0000\u0000\u0000H\u0277\u0001\u0000"+ - "\u0000\u0000J\u027b\u0001\u0000\u0000\u0000L\u027f\u0001\u0000\u0000\u0000"+ - "N\u0283\u0001\u0000\u0000\u0000P\u0287\u0001\u0000\u0000\u0000R\u0289"+ - "\u0001\u0000\u0000\u0000T\u028b\u0001\u0000\u0000\u0000V\u028e\u0001\u0000"+ - "\u0000\u0000X\u0290\u0001\u0000\u0000\u0000Z\u0299\u0001\u0000\u0000\u0000"+ - "\\\u029b\u0001\u0000\u0000\u0000^\u02a0\u0001\u0000\u0000\u0000`\u02a2"+ - "\u0001\u0000\u0000\u0000b\u02a7\u0001\u0000\u0000\u0000d\u02c6\u0001\u0000"+ - "\u0000\u0000f\u02c9\u0001\u0000\u0000\u0000h\u02f7\u0001\u0000\u0000\u0000"+ - "j\u02f9\u0001\u0000\u0000\u0000l\u02fc\u0001\u0000\u0000\u0000n\u0300"+ - "\u0001\u0000\u0000\u0000p\u0304\u0001\u0000\u0000\u0000r\u0306\u0001\u0000"+ - "\u0000\u0000t\u0309\u0001\u0000\u0000\u0000v\u030b\u0001\u0000\u0000\u0000"+ - "x\u0310\u0001\u0000\u0000\u0000z\u0312\u0001\u0000\u0000\u0000|\u0318"+ - "\u0001\u0000\u0000\u0000~\u031e\u0001\u0000\u0000\u0000\u0080\u0323\u0001"+ - "\u0000\u0000\u0000\u0082\u0325\u0001\u0000\u0000\u0000\u0084\u0328\u0001"+ - "\u0000\u0000\u0000\u0086\u032b\u0001\u0000\u0000\u0000\u0088\u0330\u0001"+ - "\u0000\u0000\u0000\u008a\u0334\u0001\u0000\u0000\u0000\u008c\u0339\u0001"+ - "\u0000\u0000\u0000\u008e\u033f\u0001\u0000\u0000\u0000\u0090\u0342\u0001"+ - "\u0000\u0000\u0000\u0092\u0344\u0001\u0000\u0000\u0000\u0094\u034a\u0001"+ - "\u0000\u0000\u0000\u0096\u034c\u0001\u0000\u0000\u0000\u0098\u0351\u0001"+ - "\u0000\u0000\u0000\u009a\u0354\u0001\u0000\u0000\u0000\u009c\u0357\u0001"+ - "\u0000\u0000\u0000\u009e\u035a\u0001\u0000\u0000\u0000\u00a0\u035c\u0001"+ - "\u0000\u0000\u0000\u00a2\u035f\u0001\u0000\u0000\u0000\u00a4\u0361\u0001"+ - "\u0000\u0000\u0000\u00a6\u0364\u0001\u0000\u0000\u0000\u00a8\u0366\u0001"+ - "\u0000\u0000\u0000\u00aa\u0368\u0001\u0000\u0000\u0000\u00ac\u036a\u0001"+ - "\u0000\u0000\u0000\u00ae\u036c\u0001\u0000\u0000\u0000\u00b0\u037c\u0001"+ - "\u0000\u0000\u0000\u00b2\u037e\u0001\u0000\u0000\u0000\u00b4\u0383\u0001"+ - "\u0000\u0000\u0000\u00b6\u0398\u0001\u0000\u0000\u0000\u00b8\u039a\u0001"+ - "\u0000\u0000\u0000\u00ba\u03a2\u0001\u0000\u0000\u0000\u00bc\u03a4\u0001"+ - "\u0000\u0000\u0000\u00be\u03a8\u0001\u0000\u0000\u0000\u00c0\u03ac\u0001"+ - "\u0000\u0000\u0000\u00c2\u03b0\u0001\u0000\u0000\u0000\u00c4\u03b5\u0001"+ - "\u0000\u0000\u0000\u00c6\u03b9\u0001\u0000\u0000\u0000\u00c8\u03bd\u0001"+ - "\u0000\u0000\u0000\u00ca\u03c1\u0001\u0000\u0000\u0000\u00cc\u03c5\u0001"+ - "\u0000\u0000\u0000\u00ce\u03c9\u0001\u0000\u0000\u0000\u00d0\u03d2\u0001"+ - "\u0000\u0000\u0000\u00d2\u03d6\u0001\u0000\u0000\u0000\u00d4\u03da\u0001"+ - "\u0000\u0000\u0000\u00d6\u03de\u0001\u0000\u0000\u0000\u00d8\u03e2\u0001"+ - "\u0000\u0000\u0000\u00da\u03e7\u0001\u0000\u0000\u0000\u00dc\u03eb\u0001"+ - "\u0000\u0000\u0000\u00de\u03f3\u0001\u0000\u0000\u0000\u00e0\u0408\u0001"+ - "\u0000\u0000\u0000\u00e2\u040c\u0001\u0000\u0000\u0000\u00e4\u0410\u0001"+ - 
"\u0000\u0000\u0000\u00e6\u0414\u0001\u0000\u0000\u0000\u00e8\u0418\u0001"+ - "\u0000\u0000\u0000\u00ea\u041c\u0001\u0000\u0000\u0000\u00ec\u0421\u0001"+ - "\u0000\u0000\u0000\u00ee\u0425\u0001\u0000\u0000\u0000\u00f0\u0429\u0001"+ - "\u0000\u0000\u0000\u00f2\u042d\u0001\u0000\u0000\u0000\u00f4\u0430\u0001"+ - "\u0000\u0000\u0000\u00f6\u0434\u0001\u0000\u0000\u0000\u00f8\u0438\u0001"+ - "\u0000\u0000\u0000\u00fa\u043c\u0001\u0000\u0000\u0000\u00fc\u0440\u0001"+ - "\u0000\u0000\u0000\u00fe\u0445\u0001\u0000\u0000\u0000\u0100\u044a\u0001"+ - "\u0000\u0000\u0000\u0102\u044f\u0001\u0000\u0000\u0000\u0104\u0456\u0001"+ - "\u0000\u0000\u0000\u0106\u045f\u0001\u0000\u0000\u0000\u0108\u0466\u0001"+ - "\u0000\u0000\u0000\u010a\u046a\u0001\u0000\u0000\u0000\u010c\u046e\u0001"+ - "\u0000\u0000\u0000\u010e\u0472\u0001\u0000\u0000\u0000\u0110\u0476\u0001"+ - "\u0000\u0000\u0000\u0112\u047a\u0001\u0000\u0000\u0000\u0114\u0480\u0001"+ - "\u0000\u0000\u0000\u0116\u0484\u0001\u0000\u0000\u0000\u0118\u0488\u0001"+ - "\u0000\u0000\u0000\u011a\u048c\u0001\u0000\u0000\u0000\u011c\u0490\u0001"+ - "\u0000\u0000\u0000\u011e\u0494\u0001\u0000\u0000\u0000\u0120\u0498\u0001"+ - "\u0000\u0000\u0000\u0122\u049c\u0001\u0000\u0000\u0000\u0124\u04a0\u0001"+ - "\u0000\u0000\u0000\u0126\u04a4\u0001\u0000\u0000\u0000\u0128\u04a9\u0001"+ - "\u0000\u0000\u0000\u012a\u04ad\u0001\u0000\u0000\u0000\u012c\u04b1\u0001"+ - "\u0000\u0000\u0000\u012e\u04b6\u0001\u0000\u0000\u0000\u0130\u04ba\u0001"+ - "\u0000\u0000\u0000\u0132\u04be\u0001\u0000\u0000\u0000\u0134\u04c2\u0001"+ - "\u0000\u0000\u0000\u0136\u04c6\u0001\u0000\u0000\u0000\u0138\u04cc\u0001"+ - "\u0000\u0000\u0000\u013a\u04d0\u0001\u0000\u0000\u0000\u013c\u04d4\u0001"+ - "\u0000\u0000\u0000\u013e\u04d8\u0001\u0000\u0000\u0000\u0140\u04dc\u0001"+ - "\u0000\u0000\u0000\u0142\u04e0\u0001\u0000\u0000\u0000\u0144\u04e4\u0001"+ - "\u0000\u0000\u0000\u0146\u04e9\u0001\u0000\u0000\u0000\u0148\u04ed\u0001"+ - "\u0000\u0000\u0000\u014a\u04f1\u0001\u0000\u0000\u0000\u014c\u04f5\u0001"+ - "\u0000\u0000\u0000\u014e\u04f9\u0001\u0000\u0000\u0000\u0150\u04fd\u0001"+ - "\u0000\u0000\u0000\u0152\u0501\u0001\u0000\u0000\u0000\u0154\u0506\u0001"+ - "\u0000\u0000\u0000\u0156\u050b\u0001\u0000\u0000\u0000\u0158\u050f\u0001"+ - "\u0000\u0000\u0000\u015a\u0513\u0001\u0000\u0000\u0000\u015c\u0517\u0001"+ - "\u0000\u0000\u0000\u015e\u051c\u0001\u0000\u0000\u0000\u0160\u0526\u0001"+ - "\u0000\u0000\u0000\u0162\u052a\u0001\u0000\u0000\u0000\u0164\u052e\u0001"+ - "\u0000\u0000\u0000\u0166\u0532\u0001\u0000\u0000\u0000\u0168\u0537\u0001"+ - "\u0000\u0000\u0000\u016a\u053e\u0001\u0000\u0000\u0000\u016c\u0542\u0001"+ - "\u0000\u0000\u0000\u016e\u0546\u0001\u0000\u0000\u0000\u0170\u054a\u0001"+ - "\u0000\u0000\u0000\u0172\u054e\u0001\u0000\u0000\u0000\u0174\u0553\u0001"+ - "\u0000\u0000\u0000\u0176\u0559\u0001\u0000\u0000\u0000\u0178\u055d\u0001"+ - "\u0000\u0000\u0000\u017a\u0561\u0001\u0000\u0000\u0000\u017c\u0565\u0001"+ - "\u0000\u0000\u0000\u017e\u056b\u0001\u0000\u0000\u0000\u0180\u056f\u0001"+ - "\u0000\u0000\u0000\u0182\u0573\u0001\u0000\u0000\u0000\u0184\u0577\u0001"+ - "\u0000\u0000\u0000\u0186\u057d\u0001\u0000\u0000\u0000\u0188\u0583\u0001"+ - "\u0000\u0000\u0000\u018a\u0589\u0001\u0000\u0000\u0000\u018c\u018d\u0005"+ - "d\u0000\u0000\u018d\u018e\u0005i\u0000\u0000\u018e\u018f\u0005s\u0000"+ - "\u0000\u018f\u0190\u0005s\u0000\u0000\u0190\u0191\u0005e\u0000\u0000\u0191"+ - "\u0192\u0005c\u0000\u0000\u0192\u0193\u0005t\u0000\u0000\u0193\u0194\u0001"+ - 
"\u0000\u0000\u0000\u0194\u0195\u0006\u0000\u0000\u0000\u0195\u0011\u0001"+ - "\u0000\u0000\u0000\u0196\u0197\u0005d\u0000\u0000\u0197\u0198\u0005r\u0000"+ - "\u0000\u0198\u0199\u0005o\u0000\u0000\u0199\u019a\u0005p\u0000\u0000\u019a"+ - "\u019b\u0001\u0000\u0000\u0000\u019b\u019c\u0006\u0001\u0001\u0000\u019c"+ - "\u0013\u0001\u0000\u0000\u0000\u019d\u019e\u0005e\u0000\u0000\u019e\u019f"+ - "\u0005n\u0000\u0000\u019f\u01a0\u0005r\u0000\u0000\u01a0\u01a1\u0005i"+ - "\u0000\u0000\u01a1\u01a2\u0005c\u0000\u0000\u01a2\u01a3\u0005h\u0000\u0000"+ - "\u01a3\u01a4\u0001\u0000\u0000\u0000\u01a4\u01a5\u0006\u0002\u0002\u0000"+ - "\u01a5\u0015\u0001\u0000\u0000\u0000\u01a6\u01a7\u0005e\u0000\u0000\u01a7"+ - "\u01a8\u0005v\u0000\u0000\u01a8\u01a9\u0005a\u0000\u0000\u01a9\u01aa\u0005"+ - "l\u0000\u0000\u01aa\u01ab\u0001\u0000\u0000\u0000\u01ab\u01ac\u0006\u0003"+ - "\u0000\u0000\u01ac\u0017\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005e\u0000"+ - "\u0000\u01ae\u01af\u0005x\u0000\u0000\u01af\u01b0\u0005p\u0000\u0000\u01b0"+ - "\u01b1\u0005l\u0000\u0000\u01b1\u01b2\u0005a\u0000\u0000\u01b2\u01b3\u0005"+ - "i\u0000\u0000\u01b3\u01b4\u0005n\u0000\u0000\u01b4\u01b5\u0001\u0000\u0000"+ - "\u0000\u01b5\u01b6\u0006\u0004\u0003\u0000\u01b6\u0019\u0001\u0000\u0000"+ - "\u0000\u01b7\u01b8\u0005f\u0000\u0000\u01b8\u01b9\u0005r\u0000\u0000\u01b9"+ - "\u01ba\u0005o\u0000\u0000\u01ba\u01bb\u0005m\u0000\u0000\u01bb\u01bc\u0001"+ - "\u0000\u0000\u0000\u01bc\u01bd\u0006\u0005\u0004\u0000\u01bd\u001b\u0001"+ - "\u0000\u0000\u0000\u01be\u01bf\u0005g\u0000\u0000\u01bf\u01c0\u0005r\u0000"+ - "\u0000\u01c0\u01c1\u0005o\u0000\u0000\u01c1\u01c2\u0005k\u0000\u0000\u01c2"+ - "\u01c3\u0001\u0000\u0000\u0000\u01c3\u01c4\u0006\u0006\u0000\u0000\u01c4"+ - "\u001d\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005i\u0000\u0000\u01c6\u01c7"+ - "\u0005n\u0000\u0000\u01c7\u01c8\u0005l\u0000\u0000\u01c8\u01c9\u0005i"+ - "\u0000\u0000\u01c9\u01ca\u0005n\u0000\u0000\u01ca\u01cb\u0005e\u0000\u0000"+ - "\u01cb\u01cc\u0005s\u0000\u0000\u01cc\u01cd\u0005t\u0000\u0000\u01cd\u01ce"+ - "\u0005a\u0000\u0000\u01ce\u01cf\u0005t\u0000\u0000\u01cf\u01d0\u0005s"+ - "\u0000\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000\u01d1\u01d2\u0006\u0007"+ - "\u0000\u0000\u01d2\u001f\u0001\u0000\u0000\u0000\u01d3\u01d4\u0005k\u0000"+ - "\u0000\u01d4\u01d5\u0005e\u0000\u0000\u01d5\u01d6\u0005e\u0000\u0000\u01d6"+ - "\u01d7\u0005p\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9"+ - "\u0006\b\u0001\u0000\u01d9!\u0001\u0000\u0000\u0000\u01da\u01db\u0005"+ - "l\u0000\u0000\u01db\u01dc\u0005i\u0000\u0000\u01dc\u01dd\u0005m\u0000"+ - "\u0000\u01dd\u01de\u0005i\u0000\u0000\u01de\u01df\u0005t\u0000\u0000\u01df"+ - "\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1\u0006\t\u0000\u0000\u01e1#"+ - "\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005l\u0000\u0000\u01e3\u01e4\u0005"+ - "o\u0000\u0000\u01e4\u01e5\u0005o\u0000\u0000\u01e5\u01e6\u0005k\u0000"+ - "\u0000\u01e6\u01e7\u0005u\u0000\u0000\u01e7\u01e8\u0005p\u0000\u0000\u01e8"+ - "\u01e9\u0001\u0000\u0000\u0000\u01e9\u01ea\u0006\n\u0005\u0000\u01ea%"+ - "\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005m\u0000\u0000\u01ec\u01ed\u0005"+ - "e\u0000\u0000\u01ed\u01ee\u0005t\u0000\u0000\u01ee\u01ef\u0005a\u0000"+ - "\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f1\u0006\u000b\u0006"+ - "\u0000\u01f1\'\u0001\u0000\u0000\u0000\u01f2\u01f3\u0005m\u0000\u0000"+ - "\u01f3\u01f4\u0005e\u0000\u0000\u01f4\u01f5\u0005t\u0000\u0000\u01f5\u01f6"+ - "\u0005r\u0000\u0000\u01f6\u01f7\u0005i\u0000\u0000\u01f7\u01f8\u0005c"+ - 
"\u0000\u0000\u01f8\u01f9\u0005s\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fb\u0006\f\u0007\u0000\u01fb)\u0001\u0000\u0000\u0000"+ - "\u01fc\u01fd\u0005m\u0000\u0000\u01fd\u01fe\u0005v\u0000\u0000\u01fe\u01ff"+ - "\u0005_\u0000\u0000\u01ff\u0200\u0005e\u0000\u0000\u0200\u0201\u0005x"+ - "\u0000\u0000\u0201\u0202\u0005p\u0000\u0000\u0202\u0203\u0005a\u0000\u0000"+ - "\u0203\u0204\u0005n\u0000\u0000\u0204\u0205\u0005d\u0000\u0000\u0205\u0206"+ - "\u0001\u0000\u0000\u0000\u0206\u0207\u0006\r\b\u0000\u0207+\u0001\u0000"+ - "\u0000\u0000\u0208\u0209\u0005r\u0000\u0000\u0209\u020a\u0005e\u0000\u0000"+ - "\u020a\u020b\u0005n\u0000\u0000\u020b\u020c\u0005a\u0000\u0000\u020c\u020d"+ - "\u0005m\u0000\u0000\u020d\u020e\u0005e\u0000\u0000\u020e\u020f\u0001\u0000"+ - "\u0000\u0000\u020f\u0210\u0006\u000e\t\u0000\u0210-\u0001\u0000\u0000"+ - "\u0000\u0211\u0212\u0005r\u0000\u0000\u0212\u0213\u0005o\u0000\u0000\u0213"+ - "\u0214\u0005w\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000\u0215\u0216"+ - "\u0006\u000f\u0000\u0000\u0216/\u0001\u0000\u0000\u0000\u0217\u0218\u0005"+ - "s\u0000\u0000\u0218\u0219\u0005h\u0000\u0000\u0219\u021a\u0005o\u0000"+ - "\u0000\u021a\u021b\u0005w\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000"+ - "\u021c\u021d\u0006\u0010\n\u0000\u021d1\u0001\u0000\u0000\u0000\u021e"+ - "\u021f\u0005s\u0000\u0000\u021f\u0220\u0005o\u0000\u0000\u0220\u0221\u0005"+ - "r\u0000\u0000\u0221\u0222\u0005t\u0000\u0000\u0222\u0223\u0001\u0000\u0000"+ - "\u0000\u0223\u0224\u0006\u0011\u0000\u0000\u02243\u0001\u0000\u0000\u0000"+ - "\u0225\u0226\u0005s\u0000\u0000\u0226\u0227\u0005t\u0000\u0000\u0227\u0228"+ - "\u0005a\u0000\u0000\u0228\u0229\u0005t\u0000\u0000\u0229\u022a\u0005s"+ - "\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0006\u0012"+ - "\u0000\u0000\u022c5\u0001\u0000\u0000\u0000\u022d\u022e\u0005w\u0000\u0000"+ - "\u022e\u022f\u0005h\u0000\u0000\u022f\u0230\u0005e\u0000\u0000\u0230\u0231"+ - "\u0005r\u0000\u0000\u0231\u0232\u0005e\u0000\u0000\u0232\u0233\u0001\u0000"+ - "\u0000\u0000\u0233\u0234\u0006\u0013\u0000\u0000\u02347\u0001\u0000\u0000"+ - "\u0000\u0235\u0237\b\u0000\u0000\u0000\u0236\u0235\u0001\u0000\u0000\u0000"+ - "\u0237\u0238\u0001\u0000\u0000\u0000\u0238\u0236\u0001\u0000\u0000\u0000"+ - "\u0238\u0239\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ - "\u023a\u023b\u0006\u0014\u0000\u0000\u023b9\u0001\u0000\u0000\u0000\u023c"+ - "\u023d\u0005/\u0000\u0000\u023d\u023e\u0005/\u0000\u0000\u023e\u0242\u0001"+ - "\u0000\u0000\u0000\u023f\u0241\b\u0001\u0000\u0000\u0240\u023f\u0001\u0000"+ - "\u0000\u0000\u0241\u0244\u0001\u0000\u0000\u0000\u0242\u0240\u0001\u0000"+ - "\u0000\u0000\u0242\u0243\u0001\u0000\u0000\u0000\u0243\u0246\u0001\u0000"+ - "\u0000\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0245\u0247\u0005\r\u0000"+ - "\u0000\u0246\u0245\u0001\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000"+ - "\u0000\u0247\u0249\u0001\u0000\u0000\u0000\u0248\u024a\u0005\n\u0000\u0000"+ - "\u0249\u0248\u0001\u0000\u0000\u0000\u0249\u024a\u0001\u0000\u0000\u0000"+ - "\u024a\u024b\u0001\u0000\u0000\u0000\u024b\u024c\u0006\u0015\u000b\u0000"+ - "\u024c;\u0001\u0000\u0000\u0000\u024d\u024e\u0005/\u0000\u0000\u024e\u024f"+ - "\u0005*\u0000\u0000\u024f\u0254\u0001\u0000\u0000\u0000\u0250\u0253\u0003"+ - "<\u0016\u0000\u0251\u0253\t\u0000\u0000\u0000\u0252\u0250\u0001\u0000"+ - "\u0000\u0000\u0252\u0251\u0001\u0000\u0000\u0000\u0253\u0256\u0001\u0000"+ - "\u0000\u0000\u0254\u0255\u0001\u0000\u0000\u0000\u0254\u0252\u0001\u0000"+ - 
"\u0000\u0000\u0255\u0257\u0001\u0000\u0000\u0000\u0256\u0254\u0001\u0000"+ - "\u0000\u0000\u0257\u0258\u0005*\u0000\u0000\u0258\u0259\u0005/\u0000\u0000"+ - "\u0259\u025a\u0001\u0000\u0000\u0000\u025a\u025b\u0006\u0016\u000b\u0000"+ - "\u025b=\u0001\u0000\u0000\u0000\u025c\u025e\u0007\u0002\u0000\u0000\u025d"+ - "\u025c\u0001\u0000\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f"+ - "\u025d\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260"+ - "\u0261\u0001\u0000\u0000\u0000\u0261\u0262\u0006\u0017\u000b\u0000\u0262"+ - "?\u0001\u0000\u0000\u0000\u0263\u0267\b\u0003\u0000\u0000\u0264\u0265"+ - "\u0005/\u0000\u0000\u0265\u0267\b\u0004\u0000\u0000\u0266\u0263\u0001"+ - "\u0000\u0000\u0000\u0266\u0264\u0001\u0000\u0000\u0000\u0267A\u0001\u0000"+ - "\u0000\u0000\u0268\u026a\u0003@\u0018\u0000\u0269\u0268\u0001\u0000\u0000"+ - "\u0000\u026a\u026b\u0001\u0000\u0000\u0000\u026b\u0269\u0001\u0000\u0000"+ - "\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026cC\u0001\u0000\u0000\u0000"+ - "\u026d\u026e\u0003\u00b2Q\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f"+ - "\u0270\u0006\u001a\f\u0000\u0270\u0271\u0006\u001a\r\u0000\u0271E\u0001"+ - "\u0000\u0000\u0000\u0272\u0273\u0003N\u001f\u0000\u0273\u0274\u0001\u0000"+ - "\u0000\u0000\u0274\u0275\u0006\u001b\u000e\u0000\u0275\u0276\u0006\u001b"+ - "\u000f\u0000\u0276G\u0001\u0000\u0000\u0000\u0277\u0278\u0003>\u0017\u0000"+ - "\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u001c\u000b\u0000"+ - "\u027aI\u0001\u0000\u0000\u0000\u027b\u027c\u0003:\u0015\u0000\u027c\u027d"+ - "\u0001\u0000\u0000\u0000\u027d\u027e\u0006\u001d\u000b\u0000\u027eK\u0001"+ - "\u0000\u0000\u0000\u027f\u0280\u0003<\u0016\u0000\u0280\u0281\u0001\u0000"+ - "\u0000\u0000\u0281\u0282\u0006\u001e\u000b\u0000\u0282M\u0001\u0000\u0000"+ - "\u0000\u0283\u0284\u0005|\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000"+ - "\u0285\u0286\u0006\u001f\u000f\u0000\u0286O\u0001\u0000\u0000\u0000\u0287"+ - "\u0288\u0007\u0005\u0000\u0000\u0288Q\u0001\u0000\u0000\u0000\u0289\u028a"+ - "\u0007\u0006\u0000\u0000\u028aS\u0001\u0000\u0000\u0000\u028b\u028c\u0005"+ - "\\\u0000\u0000\u028c\u028d\u0007\u0007\u0000\u0000\u028dU\u0001\u0000"+ - "\u0000\u0000\u028e\u028f\b\b\u0000\u0000\u028fW\u0001\u0000\u0000\u0000"+ - "\u0290\u0292\u0007\t\u0000\u0000\u0291\u0293\u0007\n\u0000\u0000\u0292"+ - "\u0291\u0001\u0000\u0000\u0000\u0292\u0293\u0001\u0000\u0000\u0000\u0293"+ - "\u0295\u0001\u0000\u0000\u0000\u0294\u0296\u0003P \u0000\u0295\u0294\u0001"+ - "\u0000\u0000\u0000\u0296\u0297\u0001\u0000\u0000\u0000\u0297\u0295\u0001"+ - "\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298Y\u0001\u0000"+ - "\u0000\u0000\u0299\u029a\u0005@\u0000\u0000\u029a[\u0001\u0000\u0000\u0000"+ - "\u029b\u029c\u0005`\u0000\u0000\u029c]\u0001\u0000\u0000\u0000\u029d\u02a1"+ - "\b\u000b\u0000\u0000\u029e\u029f\u0005`\u0000\u0000\u029f\u02a1\u0005"+ - "`\u0000\u0000\u02a0\u029d\u0001\u0000\u0000\u0000\u02a0\u029e\u0001\u0000"+ - "\u0000\u0000\u02a1_\u0001\u0000\u0000\u0000\u02a2\u02a3\u0005_\u0000\u0000"+ - "\u02a3a\u0001\u0000\u0000\u0000\u02a4\u02a8\u0003R!\u0000\u02a5\u02a8"+ - "\u0003P \u0000\u02a6\u02a8\u0003`(\u0000\u02a7\u02a4\u0001\u0000\u0000"+ - "\u0000\u02a7\u02a5\u0001\u0000\u0000\u0000\u02a7\u02a6\u0001\u0000\u0000"+ - "\u0000\u02a8c\u0001\u0000\u0000\u0000\u02a9\u02ae\u0005\"\u0000\u0000"+ - "\u02aa\u02ad\u0003T\"\u0000\u02ab\u02ad\u0003V#\u0000\u02ac\u02aa\u0001"+ - "\u0000\u0000\u0000\u02ac\u02ab\u0001\u0000\u0000\u0000\u02ad\u02b0\u0001"+ - 
"\u0000\u0000\u0000\u02ae\u02ac\u0001\u0000\u0000\u0000\u02ae\u02af\u0001"+ - "\u0000\u0000\u0000\u02af\u02b1\u0001\u0000\u0000\u0000\u02b0\u02ae\u0001"+ - "\u0000\u0000\u0000\u02b1\u02c7\u0005\"\u0000\u0000\u02b2\u02b3\u0005\""+ - "\u0000\u0000\u02b3\u02b4\u0005\"\u0000\u0000\u02b4\u02b5\u0005\"\u0000"+ - "\u0000\u02b5\u02b9\u0001\u0000\u0000\u0000\u02b6\u02b8\b\u0001\u0000\u0000"+ - "\u02b7\u02b6\u0001\u0000\u0000\u0000\u02b8\u02bb\u0001\u0000\u0000\u0000"+ - "\u02b9\u02ba\u0001\u0000\u0000\u0000\u02b9\u02b7\u0001\u0000\u0000\u0000"+ - "\u02ba\u02bc\u0001\u0000\u0000\u0000\u02bb\u02b9\u0001\u0000\u0000\u0000"+ - "\u02bc\u02bd\u0005\"\u0000\u0000\u02bd\u02be\u0005\"\u0000\u0000\u02be"+ - "\u02bf\u0005\"\u0000\u0000\u02bf\u02c1\u0001\u0000\u0000\u0000\u02c0\u02c2"+ - "\u0005\"\u0000\u0000\u02c1\u02c0\u0001\u0000\u0000\u0000\u02c1\u02c2\u0001"+ - "\u0000\u0000\u0000\u02c2\u02c4\u0001\u0000\u0000\u0000\u02c3\u02c5\u0005"+ - "\"\u0000\u0000\u02c4\u02c3\u0001\u0000\u0000\u0000\u02c4\u02c5\u0001\u0000"+ - "\u0000\u0000\u02c5\u02c7\u0001\u0000\u0000\u0000\u02c6\u02a9\u0001\u0000"+ - "\u0000\u0000\u02c6\u02b2\u0001\u0000\u0000\u0000\u02c7e\u0001\u0000\u0000"+ - "\u0000\u02c8\u02ca\u0003P \u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02ca"+ - "\u02cb\u0001\u0000\u0000\u0000\u02cb\u02c9\u0001\u0000\u0000\u0000\u02cb"+ - "\u02cc\u0001\u0000\u0000\u0000\u02ccg\u0001\u0000\u0000\u0000\u02cd\u02cf"+ - "\u0003P \u0000\u02ce\u02cd\u0001\u0000\u0000\u0000\u02cf\u02d0\u0001\u0000"+ - "\u0000\u0000\u02d0\u02ce\u0001\u0000\u0000\u0000\u02d0\u02d1\u0001\u0000"+ - "\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02d6\u0003x4\u0000"+ - "\u02d3\u02d5\u0003P \u0000\u02d4\u02d3\u0001\u0000\u0000\u0000\u02d5\u02d8"+ - "\u0001\u0000\u0000\u0000\u02d6\u02d4\u0001\u0000\u0000\u0000\u02d6\u02d7"+ - "\u0001\u0000\u0000\u0000\u02d7\u02f8\u0001\u0000\u0000\u0000\u02d8\u02d6"+ - "\u0001\u0000\u0000\u0000\u02d9\u02db\u0003x4\u0000\u02da\u02dc\u0003P"+ - " \u0000\u02db\u02da\u0001\u0000\u0000\u0000\u02dc\u02dd\u0001\u0000\u0000"+ - "\u0000\u02dd\u02db\u0001\u0000\u0000\u0000\u02dd\u02de\u0001\u0000\u0000"+ - "\u0000\u02de\u02f8\u0001\u0000\u0000\u0000\u02df\u02e1\u0003P \u0000\u02e0"+ - "\u02df\u0001\u0000\u0000\u0000\u02e1\u02e2\u0001\u0000\u0000\u0000\u02e2"+ - "\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e3\u0001\u0000\u0000\u0000\u02e3"+ - "\u02eb\u0001\u0000\u0000\u0000\u02e4\u02e8\u0003x4\u0000\u02e5\u02e7\u0003"+ - "P \u0000\u02e6\u02e5\u0001\u0000\u0000\u0000\u02e7\u02ea\u0001\u0000\u0000"+ - "\u0000\u02e8\u02e6\u0001\u0000\u0000\u0000\u02e8\u02e9\u0001\u0000\u0000"+ - "\u0000\u02e9\u02ec\u0001\u0000\u0000\u0000\u02ea\u02e8\u0001\u0000\u0000"+ - "\u0000\u02eb\u02e4\u0001\u0000\u0000\u0000\u02eb\u02ec\u0001\u0000\u0000"+ - "\u0000\u02ec\u02ed\u0001\u0000\u0000\u0000\u02ed\u02ee\u0003X$\u0000\u02ee"+ - "\u02f8\u0001\u0000\u0000\u0000\u02ef\u02f1\u0003x4\u0000\u02f0\u02f2\u0003"+ - "P \u0000\u02f1\u02f0\u0001\u0000\u0000\u0000\u02f2\u02f3\u0001\u0000\u0000"+ - "\u0000\u02f3\u02f1\u0001\u0000\u0000\u0000\u02f3\u02f4\u0001\u0000\u0000"+ + "\u0000\u000f\u018c\u0001\u0000\u0000\u0000\u000f\u018e\u0001\u0000\u0000"+ + "\u0000\u000f\u0190\u0001\u0000\u0000\u0000\u000f\u0192\u0001\u0000\u0000"+ + "\u0000\u0010\u0194\u0001\u0000\u0000\u0000\u0012\u019e\u0001\u0000\u0000"+ + "\u0000\u0014\u01a5\u0001\u0000\u0000\u0000\u0016\u01ae\u0001\u0000\u0000"+ + "\u0000\u0018\u01b5\u0001\u0000\u0000\u0000\u001a\u01bf\u0001\u0000\u0000"+ + "\u0000\u001c\u01c6\u0001\u0000\u0000\u0000\u001e\u01cd\u0001\u0000\u0000"+ + "\u0000 
\u01db\u0001\u0000\u0000\u0000\"\u01e2\u0001\u0000\u0000\u0000"+ + "$\u01ea\u0001\u0000\u0000\u0000&\u01f3\u0001\u0000\u0000\u0000(\u01fa"+ + "\u0001\u0000\u0000\u0000*\u0204\u0001\u0000\u0000\u0000,\u0210\u0001\u0000"+ + "\u0000\u0000.\u0219\u0001\u0000\u0000\u00000\u021f\u0001\u0000\u0000\u0000"+ + "2\u0226\u0001\u0000\u0000\u00004\u022d\u0001\u0000\u0000\u00006\u0235"+ + "\u0001\u0000\u0000\u00008\u023e\u0001\u0000\u0000\u0000:\u0244\u0001\u0000"+ + "\u0000\u0000<\u0255\u0001\u0000\u0000\u0000>\u0265\u0001\u0000\u0000\u0000"+ + "@\u026e\u0001\u0000\u0000\u0000B\u0271\u0001\u0000\u0000\u0000D\u0275"+ + "\u0001\u0000\u0000\u0000F\u027a\u0001\u0000\u0000\u0000H\u027f\u0001\u0000"+ + "\u0000\u0000J\u0283\u0001\u0000\u0000\u0000L\u0287\u0001\u0000\u0000\u0000"+ + "N\u028b\u0001\u0000\u0000\u0000P\u028f\u0001\u0000\u0000\u0000R\u0291"+ + "\u0001\u0000\u0000\u0000T\u0293\u0001\u0000\u0000\u0000V\u0296\u0001\u0000"+ + "\u0000\u0000X\u0298\u0001\u0000\u0000\u0000Z\u02a1\u0001\u0000\u0000\u0000"+ + "\\\u02a3\u0001\u0000\u0000\u0000^\u02a8\u0001\u0000\u0000\u0000`\u02aa"+ + "\u0001\u0000\u0000\u0000b\u02af\u0001\u0000\u0000\u0000d\u02ce\u0001\u0000"+ + "\u0000\u0000f\u02d1\u0001\u0000\u0000\u0000h\u02ff\u0001\u0000\u0000\u0000"+ + "j\u0301\u0001\u0000\u0000\u0000l\u0304\u0001\u0000\u0000\u0000n\u0308"+ + "\u0001\u0000\u0000\u0000p\u030c\u0001\u0000\u0000\u0000r\u030e\u0001\u0000"+ + "\u0000\u0000t\u0311\u0001\u0000\u0000\u0000v\u0313\u0001\u0000\u0000\u0000"+ + "x\u0318\u0001\u0000\u0000\u0000z\u031a\u0001\u0000\u0000\u0000|\u0320"+ + "\u0001\u0000\u0000\u0000~\u0326\u0001\u0000\u0000\u0000\u0080\u032b\u0001"+ + "\u0000\u0000\u0000\u0082\u032d\u0001\u0000\u0000\u0000\u0084\u0330\u0001"+ + "\u0000\u0000\u0000\u0086\u0333\u0001\u0000\u0000\u0000\u0088\u0338\u0001"+ + "\u0000\u0000\u0000\u008a\u033c\u0001\u0000\u0000\u0000\u008c\u0341\u0001"+ + "\u0000\u0000\u0000\u008e\u0347\u0001\u0000\u0000\u0000\u0090\u034a\u0001"+ + "\u0000\u0000\u0000\u0092\u034c\u0001\u0000\u0000\u0000\u0094\u0352\u0001"+ + "\u0000\u0000\u0000\u0096\u0354\u0001\u0000\u0000\u0000\u0098\u0359\u0001"+ + "\u0000\u0000\u0000\u009a\u035c\u0001\u0000\u0000\u0000\u009c\u035f\u0001"+ + "\u0000\u0000\u0000\u009e\u0362\u0001\u0000\u0000\u0000\u00a0\u0364\u0001"+ + "\u0000\u0000\u0000\u00a2\u0367\u0001\u0000\u0000\u0000\u00a4\u0369\u0001"+ + "\u0000\u0000\u0000\u00a6\u036c\u0001\u0000\u0000\u0000\u00a8\u036e\u0001"+ + "\u0000\u0000\u0000\u00aa\u0370\u0001\u0000\u0000\u0000\u00ac\u0372\u0001"+ + "\u0000\u0000\u0000\u00ae\u0374\u0001\u0000\u0000\u0000\u00b0\u0384\u0001"+ + "\u0000\u0000\u0000\u00b2\u0386\u0001\u0000\u0000\u0000\u00b4\u038b\u0001"+ + "\u0000\u0000\u0000\u00b6\u03a0\u0001\u0000\u0000\u0000\u00b8\u03a2\u0001"+ + "\u0000\u0000\u0000\u00ba\u03aa\u0001\u0000\u0000\u0000\u00bc\u03ac\u0001"+ + "\u0000\u0000\u0000\u00be\u03b0\u0001\u0000\u0000\u0000\u00c0\u03b4\u0001"+ + "\u0000\u0000\u0000\u00c2\u03b8\u0001\u0000\u0000\u0000\u00c4\u03bd\u0001"+ + "\u0000\u0000\u0000\u00c6\u03c1\u0001\u0000\u0000\u0000\u00c8\u03c5\u0001"+ + "\u0000\u0000\u0000\u00ca\u03c9\u0001\u0000\u0000\u0000\u00cc\u03cd\u0001"+ + "\u0000\u0000\u0000\u00ce\u03d1\u0001\u0000\u0000\u0000\u00d0\u03da\u0001"+ + "\u0000\u0000\u0000\u00d2\u03de\u0001\u0000\u0000\u0000\u00d4\u03e2\u0001"+ + "\u0000\u0000\u0000\u00d6\u03e6\u0001\u0000\u0000\u0000\u00d8\u03ea\u0001"+ + "\u0000\u0000\u0000\u00da\u03ee\u0001\u0000\u0000\u0000\u00dc\u03f3\u0001"+ + "\u0000\u0000\u0000\u00de\u03f7\u0001\u0000\u0000\u0000\u00e0\u03ff\u0001"+ + 
"\u0000\u0000\u0000\u00e2\u0414\u0001\u0000\u0000\u0000\u00e4\u0418\u0001"+ + "\u0000\u0000\u0000\u00e6\u041c\u0001\u0000\u0000\u0000\u00e8\u0420\u0001"+ + "\u0000\u0000\u0000\u00ea\u0424\u0001\u0000\u0000\u0000\u00ec\u0428\u0001"+ + "\u0000\u0000\u0000\u00ee\u042d\u0001\u0000\u0000\u0000\u00f0\u0431\u0001"+ + "\u0000\u0000\u0000\u00f2\u0435\u0001\u0000\u0000\u0000\u00f4\u0439\u0001"+ + "\u0000\u0000\u0000\u00f6\u043c\u0001\u0000\u0000\u0000\u00f8\u0440\u0001"+ + "\u0000\u0000\u0000\u00fa\u0444\u0001\u0000\u0000\u0000\u00fc\u0448\u0001"+ + "\u0000\u0000\u0000\u00fe\u044c\u0001\u0000\u0000\u0000\u0100\u0451\u0001"+ + "\u0000\u0000\u0000\u0102\u0456\u0001\u0000\u0000\u0000\u0104\u045b\u0001"+ + "\u0000\u0000\u0000\u0106\u0462\u0001\u0000\u0000\u0000\u0108\u046b\u0001"+ + "\u0000\u0000\u0000\u010a\u0472\u0001\u0000\u0000\u0000\u010c\u0476\u0001"+ + "\u0000\u0000\u0000\u010e\u047a\u0001\u0000\u0000\u0000\u0110\u047e\u0001"+ + "\u0000\u0000\u0000\u0112\u0482\u0001\u0000\u0000\u0000\u0114\u0488\u0001"+ + "\u0000\u0000\u0000\u0116\u048c\u0001\u0000\u0000\u0000\u0118\u0490\u0001"+ + "\u0000\u0000\u0000\u011a\u0494\u0001\u0000\u0000\u0000\u011c\u0498\u0001"+ + "\u0000\u0000\u0000\u011e\u049c\u0001\u0000\u0000\u0000\u0120\u04a0\u0001"+ + "\u0000\u0000\u0000\u0122\u04a4\u0001\u0000\u0000\u0000\u0124\u04a8\u0001"+ + "\u0000\u0000\u0000\u0126\u04ac\u0001\u0000\u0000\u0000\u0128\u04b1\u0001"+ + "\u0000\u0000\u0000\u012a\u04b5\u0001\u0000\u0000\u0000\u012c\u04b9\u0001"+ + "\u0000\u0000\u0000\u012e\u04bd\u0001\u0000\u0000\u0000\u0130\u04c2\u0001"+ + "\u0000\u0000\u0000\u0132\u04c6\u0001\u0000\u0000\u0000\u0134\u04ca\u0001"+ + "\u0000\u0000\u0000\u0136\u04ce\u0001\u0000\u0000\u0000\u0138\u04d2\u0001"+ + "\u0000\u0000\u0000\u013a\u04d6\u0001\u0000\u0000\u0000\u013c\u04dc\u0001"+ + "\u0000\u0000\u0000\u013e\u04e0\u0001\u0000\u0000\u0000\u0140\u04e4\u0001"+ + "\u0000\u0000\u0000\u0142\u04e8\u0001\u0000\u0000\u0000\u0144\u04ec\u0001"+ + "\u0000\u0000\u0000\u0146\u04f0\u0001\u0000\u0000\u0000\u0148\u04f4\u0001"+ + "\u0000\u0000\u0000\u014a\u04f9\u0001\u0000\u0000\u0000\u014c\u04fd\u0001"+ + "\u0000\u0000\u0000\u014e\u0501\u0001\u0000\u0000\u0000\u0150\u0505\u0001"+ + "\u0000\u0000\u0000\u0152\u0509\u0001\u0000\u0000\u0000\u0154\u050d\u0001"+ + "\u0000\u0000\u0000\u0156\u0511\u0001\u0000\u0000\u0000\u0158\u0516\u0001"+ + "\u0000\u0000\u0000\u015a\u051b\u0001\u0000\u0000\u0000\u015c\u051f\u0001"+ + "\u0000\u0000\u0000\u015e\u0523\u0001\u0000\u0000\u0000\u0160\u0527\u0001"+ + "\u0000\u0000\u0000\u0162\u052c\u0001\u0000\u0000\u0000\u0164\u0536\u0001"+ + "\u0000\u0000\u0000\u0166\u053a\u0001\u0000\u0000\u0000\u0168\u053e\u0001"+ + "\u0000\u0000\u0000\u016a\u0542\u0001\u0000\u0000\u0000\u016c\u0547\u0001"+ + "\u0000\u0000\u0000\u016e\u054e\u0001\u0000\u0000\u0000\u0170\u0552\u0001"+ + "\u0000\u0000\u0000\u0172\u0556\u0001\u0000\u0000\u0000\u0174\u055a\u0001"+ + "\u0000\u0000\u0000\u0176\u055e\u0001\u0000\u0000\u0000\u0178\u0563\u0001"+ + "\u0000\u0000\u0000\u017a\u0569\u0001\u0000\u0000\u0000\u017c\u056f\u0001"+ + "\u0000\u0000\u0000\u017e\u0573\u0001\u0000\u0000\u0000\u0180\u0577\u0001"+ + "\u0000\u0000\u0000\u0182\u057b\u0001\u0000\u0000\u0000\u0184\u0581\u0001"+ + "\u0000\u0000\u0000\u0186\u0587\u0001\u0000\u0000\u0000\u0188\u058b\u0001"+ + "\u0000\u0000\u0000\u018a\u058f\u0001\u0000\u0000\u0000\u018c\u0593\u0001"+ + "\u0000\u0000\u0000\u018e\u0599\u0001\u0000\u0000\u0000\u0190\u059f\u0001"+ + "\u0000\u0000\u0000\u0192\u05a5\u0001\u0000\u0000\u0000\u0194\u0195\u0005"+ + 
"d\u0000\u0000\u0195\u0196\u0005i\u0000\u0000\u0196\u0197\u0005s\u0000"+ + "\u0000\u0197\u0198\u0005s\u0000\u0000\u0198\u0199\u0005e\u0000\u0000\u0199"+ + "\u019a\u0005c\u0000\u0000\u019a\u019b\u0005t\u0000\u0000\u019b\u019c\u0001"+ + "\u0000\u0000\u0000\u019c\u019d\u0006\u0000\u0000\u0000\u019d\u0011\u0001"+ + "\u0000\u0000\u0000\u019e\u019f\u0005d\u0000\u0000\u019f\u01a0\u0005r\u0000"+ + "\u0000\u01a0\u01a1\u0005o\u0000\u0000\u01a1\u01a2\u0005p\u0000\u0000\u01a2"+ + "\u01a3\u0001\u0000\u0000\u0000\u01a3\u01a4\u0006\u0001\u0001\u0000\u01a4"+ + "\u0013\u0001\u0000\u0000\u0000\u01a5\u01a6\u0005e\u0000\u0000\u01a6\u01a7"+ + "\u0005n\u0000\u0000\u01a7\u01a8\u0005r\u0000\u0000\u01a8\u01a9\u0005i"+ + "\u0000\u0000\u01a9\u01aa\u0005c\u0000\u0000\u01aa\u01ab\u0005h\u0000\u0000"+ + "\u01ab\u01ac\u0001\u0000\u0000\u0000\u01ac\u01ad\u0006\u0002\u0002\u0000"+ + "\u01ad\u0015\u0001\u0000\u0000\u0000\u01ae\u01af\u0005e\u0000\u0000\u01af"+ + "\u01b0\u0005v\u0000\u0000\u01b0\u01b1\u0005a\u0000\u0000\u01b1\u01b2\u0005"+ + "l\u0000\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u0003"+ + "\u0000\u0000\u01b4\u0017\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005e\u0000"+ + "\u0000\u01b6\u01b7\u0005x\u0000\u0000\u01b7\u01b8\u0005p\u0000\u0000\u01b8"+ + "\u01b9\u0005l\u0000\u0000\u01b9\u01ba\u0005a\u0000\u0000\u01ba\u01bb\u0005"+ + "i\u0000\u0000\u01bb\u01bc\u0005n\u0000\u0000\u01bc\u01bd\u0001\u0000\u0000"+ + "\u0000\u01bd\u01be\u0006\u0004\u0003\u0000\u01be\u0019\u0001\u0000\u0000"+ + "\u0000\u01bf\u01c0\u0005f\u0000\u0000\u01c0\u01c1\u0005r\u0000\u0000\u01c1"+ + "\u01c2\u0005o\u0000\u0000\u01c2\u01c3\u0005m\u0000\u0000\u01c3\u01c4\u0001"+ + "\u0000\u0000\u0000\u01c4\u01c5\u0006\u0005\u0004\u0000\u01c5\u001b\u0001"+ + "\u0000\u0000\u0000\u01c6\u01c7\u0005g\u0000\u0000\u01c7\u01c8\u0005r\u0000"+ + "\u0000\u01c8\u01c9\u0005o\u0000\u0000\u01c9\u01ca\u0005k\u0000\u0000\u01ca"+ + "\u01cb\u0001\u0000\u0000\u0000\u01cb\u01cc\u0006\u0006\u0000\u0000\u01cc"+ + "\u001d\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005i\u0000\u0000\u01ce\u01cf"+ + "\u0005n\u0000\u0000\u01cf\u01d0\u0005l\u0000\u0000\u01d0\u01d1\u0005i"+ + "\u0000\u0000\u01d1\u01d2\u0005n\u0000\u0000\u01d2\u01d3\u0005e\u0000\u0000"+ + "\u01d3\u01d4\u0005s\u0000\u0000\u01d4\u01d5\u0005t\u0000\u0000\u01d5\u01d6"+ + "\u0005a\u0000\u0000\u01d6\u01d7\u0005t\u0000\u0000\u01d7\u01d8\u0005s"+ + "\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9\u01da\u0006\u0007"+ + "\u0000\u0000\u01da\u001f\u0001\u0000\u0000\u0000\u01db\u01dc\u0005k\u0000"+ + "\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0005e\u0000\u0000\u01de"+ + "\u01df\u0005p\u0000\u0000\u01df\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1"+ + "\u0006\b\u0001\u0000\u01e1!\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005"+ + "l\u0000\u0000\u01e3\u01e4\u0005i\u0000\u0000\u01e4\u01e5\u0005m\u0000"+ + "\u0000\u01e5\u01e6\u0005i\u0000\u0000\u01e6\u01e7\u0005t\u0000\u0000\u01e7"+ + "\u01e8\u0001\u0000\u0000\u0000\u01e8\u01e9\u0006\t\u0000\u0000\u01e9#"+ + "\u0001\u0000\u0000\u0000\u01ea\u01eb\u0005l\u0000\u0000\u01eb\u01ec\u0005"+ + "o\u0000\u0000\u01ec\u01ed\u0005o\u0000\u0000\u01ed\u01ee\u0005k\u0000"+ + "\u0000\u01ee\u01ef\u0005u\u0000\u0000\u01ef\u01f0\u0005p\u0000\u0000\u01f0"+ + "\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006\n\u0005\u0000\u01f2%"+ + "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005m\u0000\u0000\u01f4\u01f5\u0005"+ + "e\u0000\u0000\u01f5\u01f6\u0005t\u0000\u0000\u01f6\u01f7\u0005a\u0000"+ + "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u000b\u0006"+ + 
"\u0000\u01f9\'\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005m\u0000\u0000"+ + "\u01fb\u01fc\u0005e\u0000\u0000\u01fc\u01fd\u0005t\u0000\u0000\u01fd\u01fe"+ + "\u0005r\u0000\u0000\u01fe\u01ff\u0005i\u0000\u0000\u01ff\u0200\u0005c"+ + "\u0000\u0000\u0200\u0201\u0005s\u0000\u0000\u0201\u0202\u0001\u0000\u0000"+ + "\u0000\u0202\u0203\u0006\f\u0007\u0000\u0203)\u0001\u0000\u0000\u0000"+ + "\u0204\u0205\u0005m\u0000\u0000\u0205\u0206\u0005v\u0000\u0000\u0206\u0207"+ + "\u0005_\u0000\u0000\u0207\u0208\u0005e\u0000\u0000\u0208\u0209\u0005x"+ + "\u0000\u0000\u0209\u020a\u0005p\u0000\u0000\u020a\u020b\u0005a\u0000\u0000"+ + "\u020b\u020c\u0005n\u0000\u0000\u020c\u020d\u0005d\u0000\u0000\u020d\u020e"+ + "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\r\b\u0000\u020f+\u0001\u0000"+ + "\u0000\u0000\u0210\u0211\u0005r\u0000\u0000\u0211\u0212\u0005e\u0000\u0000"+ + "\u0212\u0213\u0005n\u0000\u0000\u0213\u0214\u0005a\u0000\u0000\u0214\u0215"+ + "\u0005m\u0000\u0000\u0215\u0216\u0005e\u0000\u0000\u0216\u0217\u0001\u0000"+ + "\u0000\u0000\u0217\u0218\u0006\u000e\t\u0000\u0218-\u0001\u0000\u0000"+ + "\u0000\u0219\u021a\u0005r\u0000\u0000\u021a\u021b\u0005o\u0000\u0000\u021b"+ + "\u021c\u0005w\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d\u021e"+ + "\u0006\u000f\u0000\u0000\u021e/\u0001\u0000\u0000\u0000\u021f\u0220\u0005"+ + "s\u0000\u0000\u0220\u0221\u0005h\u0000\u0000\u0221\u0222\u0005o\u0000"+ + "\u0000\u0222\u0223\u0005w\u0000\u0000\u0223\u0224\u0001\u0000\u0000\u0000"+ + "\u0224\u0225\u0006\u0010\n\u0000\u02251\u0001\u0000\u0000\u0000\u0226"+ + "\u0227\u0005s\u0000\u0000\u0227\u0228\u0005o\u0000\u0000\u0228\u0229\u0005"+ + "r\u0000\u0000\u0229\u022a\u0005t\u0000\u0000\u022a\u022b\u0001\u0000\u0000"+ + "\u0000\u022b\u022c\u0006\u0011\u0000\u0000\u022c3\u0001\u0000\u0000\u0000"+ + "\u022d\u022e\u0005s\u0000\u0000\u022e\u022f\u0005t\u0000\u0000\u022f\u0230"+ + "\u0005a\u0000\u0000\u0230\u0231\u0005t\u0000\u0000\u0231\u0232\u0005s"+ + "\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000\u0233\u0234\u0006\u0012"+ + "\u0000\u0000\u02345\u0001\u0000\u0000\u0000\u0235\u0236\u0005w\u0000\u0000"+ + "\u0236\u0237\u0005h\u0000\u0000\u0237\u0238\u0005e\u0000\u0000\u0238\u0239"+ + "\u0005r\u0000\u0000\u0239\u023a\u0005e\u0000\u0000\u023a\u023b\u0001\u0000"+ + "\u0000\u0000\u023b\u023c\u0006\u0013\u0000\u0000\u023c7\u0001\u0000\u0000"+ + "\u0000\u023d\u023f\b\u0000\u0000\u0000\u023e\u023d\u0001\u0000\u0000\u0000"+ + "\u023f\u0240\u0001\u0000\u0000\u0000\u0240\u023e\u0001\u0000\u0000\u0000"+ + "\u0240\u0241\u0001\u0000\u0000\u0000\u0241\u0242\u0001\u0000\u0000\u0000"+ + "\u0242\u0243\u0006\u0014\u0000\u0000\u02439\u0001\u0000\u0000\u0000\u0244"+ + "\u0245\u0005/\u0000\u0000\u0245\u0246\u0005/\u0000\u0000\u0246\u024a\u0001"+ + "\u0000\u0000\u0000\u0247\u0249\b\u0001\u0000\u0000\u0248\u0247\u0001\u0000"+ + "\u0000\u0000\u0249\u024c\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000"+ + "\u0000\u0000\u024a\u024b\u0001\u0000\u0000\u0000\u024b\u024e\u0001\u0000"+ + "\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024d\u024f\u0005\r\u0000"+ + "\u0000\u024e\u024d\u0001\u0000\u0000\u0000\u024e\u024f\u0001\u0000\u0000"+ + "\u0000\u024f\u0251\u0001\u0000\u0000\u0000\u0250\u0252\u0005\n\u0000\u0000"+ + "\u0251\u0250\u0001\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000"+ + "\u0252\u0253\u0001\u0000\u0000\u0000\u0253\u0254\u0006\u0015\u000b\u0000"+ + "\u0254;\u0001\u0000\u0000\u0000\u0255\u0256\u0005/\u0000\u0000\u0256\u0257"+ + "\u0005*\u0000\u0000\u0257\u025c\u0001\u0000\u0000\u0000\u0258\u025b\u0003"+ + 
"<\u0016\u0000\u0259\u025b\t\u0000\u0000\u0000\u025a\u0258\u0001\u0000"+ + "\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b\u025e\u0001\u0000"+ + "\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c\u025a\u0001\u0000"+ + "\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e\u025c\u0001\u0000"+ + "\u0000\u0000\u025f\u0260\u0005*\u0000\u0000\u0260\u0261\u0005/\u0000\u0000"+ + "\u0261\u0262\u0001\u0000\u0000\u0000\u0262\u0263\u0006\u0016\u000b\u0000"+ + "\u0263=\u0001\u0000\u0000\u0000\u0264\u0266\u0007\u0002\u0000\u0000\u0265"+ + "\u0264\u0001\u0000\u0000\u0000\u0266\u0267\u0001\u0000\u0000\u0000\u0267"+ + "\u0265\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268"+ + "\u0269\u0001\u0000\u0000\u0000\u0269\u026a\u0006\u0017\u000b\u0000\u026a"+ + "?\u0001\u0000\u0000\u0000\u026b\u026f\b\u0003\u0000\u0000\u026c\u026d"+ + "\u0005/\u0000\u0000\u026d\u026f\b\u0004\u0000\u0000\u026e\u026b\u0001"+ + "\u0000\u0000\u0000\u026e\u026c\u0001\u0000\u0000\u0000\u026fA\u0001\u0000"+ + "\u0000\u0000\u0270\u0272\u0003@\u0018\u0000\u0271\u0270\u0001\u0000\u0000"+ + "\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001\u0000\u0000"+ + "\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274C\u0001\u0000\u0000\u0000"+ + "\u0275\u0276\u0003\u00b2Q\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277"+ + "\u0278\u0006\u001a\f\u0000\u0278\u0279\u0006\u001a\r\u0000\u0279E\u0001"+ + "\u0000\u0000\u0000\u027a\u027b\u0003N\u001f\u0000\u027b\u027c\u0001\u0000"+ + "\u0000\u0000\u027c\u027d\u0006\u001b\u000e\u0000\u027d\u027e\u0006\u001b"+ + "\u000f\u0000\u027eG\u0001\u0000\u0000\u0000\u027f\u0280\u0003>\u0017\u0000"+ + "\u0280\u0281\u0001\u0000\u0000\u0000\u0281\u0282\u0006\u001c\u000b\u0000"+ + "\u0282I\u0001\u0000\u0000\u0000\u0283\u0284\u0003:\u0015\u0000\u0284\u0285"+ + "\u0001\u0000\u0000\u0000\u0285\u0286\u0006\u001d\u000b\u0000\u0286K\u0001"+ + "\u0000\u0000\u0000\u0287\u0288\u0003<\u0016\u0000\u0288\u0289\u0001\u0000"+ + "\u0000\u0000\u0289\u028a\u0006\u001e\u000b\u0000\u028aM\u0001\u0000\u0000"+ + "\u0000\u028b\u028c\u0005|\u0000\u0000\u028c\u028d\u0001\u0000\u0000\u0000"+ + "\u028d\u028e\u0006\u001f\u000f\u0000\u028eO\u0001\u0000\u0000\u0000\u028f"+ + "\u0290\u0007\u0005\u0000\u0000\u0290Q\u0001\u0000\u0000\u0000\u0291\u0292"+ + "\u0007\u0006\u0000\u0000\u0292S\u0001\u0000\u0000\u0000\u0293\u0294\u0005"+ + "\\\u0000\u0000\u0294\u0295\u0007\u0007\u0000\u0000\u0295U\u0001\u0000"+ + "\u0000\u0000\u0296\u0297\b\b\u0000\u0000\u0297W\u0001\u0000\u0000\u0000"+ + "\u0298\u029a\u0007\t\u0000\u0000\u0299\u029b\u0007\n\u0000\u0000\u029a"+ + "\u0299\u0001\u0000\u0000\u0000\u029a\u029b\u0001\u0000\u0000\u0000\u029b"+ + "\u029d\u0001\u0000\u0000\u0000\u029c\u029e\u0003P \u0000\u029d\u029c\u0001"+ + "\u0000\u0000\u0000\u029e\u029f\u0001\u0000\u0000\u0000\u029f\u029d\u0001"+ + "\u0000\u0000\u0000\u029f\u02a0\u0001\u0000\u0000\u0000\u02a0Y\u0001\u0000"+ + "\u0000\u0000\u02a1\u02a2\u0005@\u0000\u0000\u02a2[\u0001\u0000\u0000\u0000"+ + "\u02a3\u02a4\u0005`\u0000\u0000\u02a4]\u0001\u0000\u0000\u0000\u02a5\u02a9"+ + "\b\u000b\u0000\u0000\u02a6\u02a7\u0005`\u0000\u0000\u02a7\u02a9\u0005"+ + "`\u0000\u0000\u02a8\u02a5\u0001\u0000\u0000\u0000\u02a8\u02a6\u0001\u0000"+ + "\u0000\u0000\u02a9_\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005_\u0000\u0000"+ + "\u02aba\u0001\u0000\u0000\u0000\u02ac\u02b0\u0003R!\u0000\u02ad\u02b0"+ + "\u0003P \u0000\u02ae\u02b0\u0003`(\u0000\u02af\u02ac\u0001\u0000\u0000"+ + "\u0000\u02af\u02ad\u0001\u0000\u0000\u0000\u02af\u02ae\u0001\u0000\u0000"+ + 
"\u0000\u02b0c\u0001\u0000\u0000\u0000\u02b1\u02b6\u0005\"\u0000\u0000"+ + "\u02b2\u02b5\u0003T\"\u0000\u02b3\u02b5\u0003V#\u0000\u02b4\u02b2\u0001"+ + "\u0000\u0000\u0000\u02b4\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b8\u0001"+ + "\u0000\u0000\u0000\u02b6\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001"+ + "\u0000\u0000\u0000\u02b7\u02b9\u0001\u0000\u0000\u0000\u02b8\u02b6\u0001"+ + "\u0000\u0000\u0000\u02b9\u02cf\u0005\"\u0000\u0000\u02ba\u02bb\u0005\""+ + "\u0000\u0000\u02bb\u02bc\u0005\"\u0000\u0000\u02bc\u02bd\u0005\"\u0000"+ + "\u0000\u02bd\u02c1\u0001\u0000\u0000\u0000\u02be\u02c0\b\u0001\u0000\u0000"+ + "\u02bf\u02be\u0001\u0000\u0000\u0000\u02c0\u02c3\u0001\u0000\u0000\u0000"+ + "\u02c1\u02c2\u0001\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000\u0000"+ + "\u02c2\u02c4\u0001\u0000\u0000\u0000\u02c3\u02c1\u0001\u0000\u0000\u0000"+ + "\u02c4\u02c5\u0005\"\u0000\u0000\u02c5\u02c6\u0005\"\u0000\u0000\u02c6"+ + "\u02c7\u0005\"\u0000\u0000\u02c7\u02c9\u0001\u0000\u0000\u0000\u02c8\u02ca"+ + "\u0005\"\u0000\u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cc\u0001\u0000\u0000\u0000\u02cb\u02cd\u0005"+ + "\"\u0000\u0000\u02cc\u02cb\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001\u0000"+ + "\u0000\u0000\u02cd\u02cf\u0001\u0000\u0000\u0000\u02ce\u02b1\u0001\u0000"+ + "\u0000\u0000\u02ce\u02ba\u0001\u0000\u0000\u0000\u02cfe\u0001\u0000\u0000"+ + "\u0000\u02d0\u02d2\u0003P \u0000\u02d1\u02d0\u0001\u0000\u0000\u0000\u02d2"+ + "\u02d3\u0001\u0000\u0000\u0000\u02d3\u02d1\u0001\u0000\u0000\u0000\u02d3"+ + "\u02d4\u0001\u0000\u0000\u0000\u02d4g\u0001\u0000\u0000\u0000\u02d5\u02d7"+ + "\u0003P \u0000\u02d6\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000"+ + "\u0000\u0000\u02d8\u02d6\u0001\u0000\u0000\u0000\u02d8\u02d9\u0001\u0000"+ + "\u0000\u0000\u02d9\u02da\u0001\u0000\u0000\u0000\u02da\u02de\u0003x4\u0000"+ + "\u02db\u02dd\u0003P \u0000\u02dc\u02db\u0001\u0000\u0000\u0000\u02dd\u02e0"+ + "\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000\u0000\u02de\u02df"+ + "\u0001\u0000\u0000\u0000\u02df\u0300\u0001\u0000\u0000\u0000\u02e0\u02de"+ + "\u0001\u0000\u0000\u0000\u02e1\u02e3\u0003x4\u0000\u02e2\u02e4\u0003P"+ + " \u0000\u02e3\u02e2\u0001\u0000\u0000\u0000\u02e4\u02e5\u0001\u0000\u0000"+ + "\u0000\u02e5\u02e3\u0001\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000\u0000"+ + "\u0000\u02e6\u0300\u0001\u0000\u0000\u0000\u02e7\u02e9\u0003P \u0000\u02e8"+ + "\u02e7\u0001\u0000\u0000\u0000\u02e9\u02ea\u0001\u0000\u0000\u0000\u02ea"+ + "\u02e8\u0001\u0000\u0000\u0000\u02ea\u02eb\u0001\u0000\u0000\u0000\u02eb"+ + "\u02f3\u0001\u0000\u0000\u0000\u02ec\u02f0\u0003x4\u0000\u02ed\u02ef\u0003"+ + "P \u0000\u02ee\u02ed\u0001\u0000\u0000\u0000\u02ef\u02f2\u0001\u0000\u0000"+ + "\u0000\u02f0\u02ee\u0001\u0000\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000"+ + "\u0000\u02f1\u02f4\u0001\u0000\u0000\u0000\u02f2\u02f0\u0001\u0000\u0000"+ + "\u0000\u02f3\u02ec\u0001\u0000\u0000\u0000\u02f3\u02f4\u0001\u0000\u0000"+ "\u0000\u02f4\u02f5\u0001\u0000\u0000\u0000\u02f5\u02f6\u0003X$\u0000\u02f6"+ - "\u02f8\u0001\u0000\u0000\u0000\u02f7\u02ce\u0001\u0000\u0000\u0000\u02f7"+ - "\u02d9\u0001\u0000\u0000\u0000\u02f7\u02e0\u0001\u0000\u0000\u0000\u02f7"+ - "\u02ef\u0001\u0000\u0000\u0000\u02f8i\u0001\u0000\u0000\u0000\u02f9\u02fa"+ - "\u0005b\u0000\u0000\u02fa\u02fb\u0005y\u0000\u0000\u02fbk\u0001\u0000"+ - "\u0000\u0000\u02fc\u02fd\u0005a\u0000\u0000\u02fd\u02fe\u0005n\u0000\u0000"+ - "\u02fe\u02ff\u0005d\u0000\u0000\u02ffm\u0001\u0000\u0000\u0000\u0300\u0301"+ - 
"\u0005a\u0000\u0000\u0301\u0302\u0005s\u0000\u0000\u0302\u0303\u0005c"+ - "\u0000\u0000\u0303o\u0001\u0000\u0000\u0000\u0304\u0305\u0005=\u0000\u0000"+ - "\u0305q\u0001\u0000\u0000\u0000\u0306\u0307\u0005:\u0000\u0000\u0307\u0308"+ - "\u0005:\u0000\u0000\u0308s\u0001\u0000\u0000\u0000\u0309\u030a\u0005,"+ - "\u0000\u0000\u030au\u0001\u0000\u0000\u0000\u030b\u030c\u0005d\u0000\u0000"+ - "\u030c\u030d\u0005e\u0000\u0000\u030d\u030e\u0005s\u0000\u0000\u030e\u030f"+ - "\u0005c\u0000\u0000\u030fw\u0001\u0000\u0000\u0000\u0310\u0311\u0005."+ - "\u0000\u0000\u0311y\u0001\u0000\u0000\u0000\u0312\u0313\u0005f\u0000\u0000"+ - "\u0313\u0314\u0005a\u0000\u0000\u0314\u0315\u0005l\u0000\u0000\u0315\u0316"+ - "\u0005s\u0000\u0000\u0316\u0317\u0005e\u0000\u0000\u0317{\u0001\u0000"+ - "\u0000\u0000\u0318\u0319\u0005f\u0000\u0000\u0319\u031a\u0005i\u0000\u0000"+ - "\u031a\u031b\u0005r\u0000\u0000\u031b\u031c\u0005s\u0000\u0000\u031c\u031d"+ - "\u0005t\u0000\u0000\u031d}\u0001\u0000\u0000\u0000\u031e\u031f\u0005l"+ - "\u0000\u0000\u031f\u0320\u0005a\u0000\u0000\u0320\u0321\u0005s\u0000\u0000"+ - "\u0321\u0322\u0005t\u0000\u0000\u0322\u007f\u0001\u0000\u0000\u0000\u0323"+ - "\u0324\u0005(\u0000\u0000\u0324\u0081\u0001\u0000\u0000\u0000\u0325\u0326"+ - "\u0005i\u0000\u0000\u0326\u0327\u0005n\u0000\u0000\u0327\u0083\u0001\u0000"+ - "\u0000\u0000\u0328\u0329\u0005i\u0000\u0000\u0329\u032a\u0005s\u0000\u0000"+ - "\u032a\u0085\u0001\u0000\u0000\u0000\u032b\u032c\u0005l\u0000\u0000\u032c"+ - "\u032d\u0005i\u0000\u0000\u032d\u032e\u0005k\u0000\u0000\u032e\u032f\u0005"+ - "e\u0000\u0000\u032f\u0087\u0001\u0000\u0000\u0000\u0330\u0331\u0005n\u0000"+ - "\u0000\u0331\u0332\u0005o\u0000\u0000\u0332\u0333\u0005t\u0000\u0000\u0333"+ - "\u0089\u0001\u0000\u0000\u0000\u0334\u0335\u0005n\u0000\u0000\u0335\u0336"+ - "\u0005u\u0000\u0000\u0336\u0337\u0005l\u0000\u0000\u0337\u0338\u0005l"+ - "\u0000\u0000\u0338\u008b\u0001\u0000\u0000\u0000\u0339\u033a\u0005n\u0000"+ - "\u0000\u033a\u033b\u0005u\u0000\u0000\u033b\u033c\u0005l\u0000\u0000\u033c"+ - "\u033d\u0005l\u0000\u0000\u033d\u033e\u0005s\u0000\u0000\u033e\u008d\u0001"+ - "\u0000\u0000\u0000\u033f\u0340\u0005o\u0000\u0000\u0340\u0341\u0005r\u0000"+ - "\u0000\u0341\u008f\u0001\u0000\u0000\u0000\u0342\u0343\u0005?\u0000\u0000"+ - "\u0343\u0091\u0001\u0000\u0000\u0000\u0344\u0345\u0005r\u0000\u0000\u0345"+ - "\u0346\u0005l\u0000\u0000\u0346\u0347\u0005i\u0000\u0000\u0347\u0348\u0005"+ - "k\u0000\u0000\u0348\u0349\u0005e\u0000\u0000\u0349\u0093\u0001\u0000\u0000"+ - "\u0000\u034a\u034b\u0005)\u0000\u0000\u034b\u0095\u0001\u0000\u0000\u0000"+ - "\u034c\u034d\u0005t\u0000\u0000\u034d\u034e\u0005r\u0000\u0000\u034e\u034f"+ - "\u0005u\u0000\u0000\u034f\u0350\u0005e\u0000\u0000\u0350\u0097\u0001\u0000"+ - "\u0000\u0000\u0351\u0352\u0005=\u0000\u0000\u0352\u0353\u0005=\u0000\u0000"+ - "\u0353\u0099\u0001\u0000\u0000\u0000\u0354\u0355\u0005=\u0000\u0000\u0355"+ - "\u0356\u0005~\u0000\u0000\u0356\u009b\u0001\u0000\u0000\u0000\u0357\u0358"+ - "\u0005!\u0000\u0000\u0358\u0359\u0005=\u0000\u0000\u0359\u009d\u0001\u0000"+ - "\u0000\u0000\u035a\u035b\u0005<\u0000\u0000\u035b\u009f\u0001\u0000\u0000"+ - "\u0000\u035c\u035d\u0005<\u0000\u0000\u035d\u035e\u0005=\u0000\u0000\u035e"+ - "\u00a1\u0001\u0000\u0000\u0000\u035f\u0360\u0005>\u0000\u0000\u0360\u00a3"+ - "\u0001\u0000\u0000\u0000\u0361\u0362\u0005>\u0000\u0000\u0362\u0363\u0005"+ - "=\u0000\u0000\u0363\u00a5\u0001\u0000\u0000\u0000\u0364\u0365\u0005+\u0000"+ - "\u0000\u0365\u00a7\u0001\u0000\u0000\u0000\u0366\u0367\u0005-\u0000\u0000"+ 
- "\u0367\u00a9\u0001\u0000\u0000\u0000\u0368\u0369\u0005*\u0000\u0000\u0369"+ - "\u00ab\u0001\u0000\u0000\u0000\u036a\u036b\u0005/\u0000\u0000\u036b\u00ad"+ - "\u0001\u0000\u0000\u0000\u036c\u036d\u0005%\u0000\u0000\u036d\u00af\u0001"+ - "\u0000\u0000\u0000\u036e\u036f\u0003\u0090@\u0000\u036f\u0373\u0003R!"+ - "\u0000\u0370\u0372\u0003b)\u0000\u0371\u0370\u0001\u0000\u0000\u0000\u0372"+ - "\u0375\u0001\u0000\u0000\u0000\u0373\u0371\u0001\u0000\u0000\u0000\u0373"+ - "\u0374\u0001\u0000\u0000\u0000\u0374\u037d\u0001\u0000\u0000\u0000\u0375"+ - "\u0373\u0001\u0000\u0000\u0000\u0376\u0378\u0003\u0090@\u0000\u0377\u0379"+ - "\u0003P \u0000\u0378\u0377\u0001\u0000\u0000\u0000\u0379\u037a\u0001\u0000"+ - "\u0000\u0000\u037a\u0378\u0001\u0000\u0000\u0000\u037a\u037b\u0001\u0000"+ - "\u0000\u0000\u037b\u037d\u0001\u0000\u0000\u0000\u037c\u036e\u0001\u0000"+ - "\u0000\u0000\u037c\u0376\u0001\u0000\u0000\u0000\u037d\u00b1\u0001\u0000"+ - "\u0000\u0000\u037e\u037f\u0005[\u0000\u0000\u037f\u0380\u0001\u0000\u0000"+ - "\u0000\u0380\u0381\u0006Q\u0000\u0000\u0381\u0382\u0006Q\u0000\u0000\u0382"+ - "\u00b3\u0001\u0000\u0000\u0000\u0383\u0384\u0005]\u0000\u0000\u0384\u0385"+ - "\u0001\u0000\u0000\u0000\u0385\u0386\u0006R\u000f\u0000\u0386\u0387\u0006"+ - "R\u000f\u0000\u0387\u00b5\u0001\u0000\u0000\u0000\u0388\u038c\u0003R!"+ - "\u0000\u0389\u038b\u0003b)\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b"+ - "\u038e\u0001\u0000\u0000\u0000\u038c\u038a\u0001\u0000\u0000\u0000\u038c"+ - "\u038d\u0001\u0000\u0000\u0000\u038d\u0399\u0001\u0000\u0000\u0000\u038e"+ - "\u038c\u0001\u0000\u0000\u0000\u038f\u0392\u0003`(\u0000\u0390\u0392\u0003"+ - "Z%\u0000\u0391\u038f\u0001\u0000\u0000\u0000\u0391\u0390\u0001\u0000\u0000"+ - "\u0000\u0392\u0394\u0001\u0000\u0000\u0000\u0393\u0395\u0003b)\u0000\u0394"+ - "\u0393\u0001\u0000\u0000\u0000\u0395\u0396\u0001\u0000\u0000\u0000\u0396"+ - "\u0394\u0001\u0000\u0000\u0000\u0396\u0397\u0001\u0000\u0000\u0000\u0397"+ - "\u0399\u0001\u0000\u0000\u0000\u0398\u0388\u0001\u0000\u0000\u0000\u0398"+ - "\u0391\u0001\u0000\u0000\u0000\u0399\u00b7\u0001\u0000\u0000\u0000\u039a"+ - "\u039c\u0003\\&\u0000\u039b\u039d\u0003^\'\u0000\u039c\u039b\u0001\u0000"+ - "\u0000\u0000\u039d\u039e\u0001\u0000\u0000\u0000\u039e\u039c\u0001\u0000"+ - "\u0000\u0000\u039e\u039f\u0001\u0000\u0000\u0000\u039f\u03a0\u0001\u0000"+ - "\u0000\u0000\u03a0\u03a1\u0003\\&\u0000\u03a1\u00b9\u0001\u0000\u0000"+ - "\u0000\u03a2\u03a3\u0003\u00b8T\u0000\u03a3\u00bb\u0001\u0000\u0000\u0000"+ - "\u03a4\u03a5\u0003:\u0015\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6"+ - "\u03a7\u0006V\u000b\u0000\u03a7\u00bd\u0001\u0000\u0000\u0000\u03a8\u03a9"+ - "\u0003<\u0016\u0000\u03a9\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006"+ - "W\u000b\u0000\u03ab\u00bf\u0001\u0000\u0000\u0000\u03ac\u03ad\u0003>\u0017"+ - "\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000\u03ae\u03af\u0006X\u000b\u0000"+ - "\u03af\u00c1\u0001\u0000\u0000\u0000\u03b0\u03b1\u0003N\u001f\u0000\u03b1"+ - "\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006Y\u000e\u0000\u03b3\u03b4"+ - "\u0006Y\u000f\u0000\u03b4\u00c3\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003"+ - "\u00b2Q\u0000\u03b6\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006Z\f"+ - "\u0000\u03b8\u00c5\u0001\u0000\u0000\u0000\u03b9\u03ba\u0003\u00b4R\u0000"+ - "\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc\u0006[\u0010\u0000\u03bc"+ - "\u00c7\u0001\u0000\u0000\u0000\u03bd\u03be\u0003t2\u0000\u03be\u03bf\u0001"+ - "\u0000\u0000\u0000\u03bf\u03c0\u0006\\\u0011\u0000\u03c0\u00c9\u0001\u0000"+ - 
"\u0000\u0000\u03c1\u03c2\u0003p0\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000"+ - "\u03c3\u03c4\u0006]\u0012\u0000\u03c4\u00cb\u0001\u0000\u0000\u0000\u03c5"+ - "\u03c6\u0003d*\u0000\u03c6\u03c7\u0001\u0000\u0000\u0000\u03c7\u03c8\u0006"+ - "^\u0013\u0000\u03c8\u00cd\u0001\u0000\u0000\u0000\u03c9\u03ca\u0005m\u0000"+ - "\u0000\u03ca\u03cb\u0005e\u0000\u0000\u03cb\u03cc\u0005t\u0000\u0000\u03cc"+ - "\u03cd\u0005a\u0000\u0000\u03cd\u03ce\u0005d\u0000\u0000\u03ce\u03cf\u0005"+ - "a\u0000\u0000\u03cf\u03d0\u0005t\u0000\u0000\u03d0\u03d1\u0005a\u0000"+ - "\u0000\u03d1\u00cf\u0001\u0000\u0000\u0000\u03d2\u03d3\u0003B\u0019\u0000"+ - "\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03d5\u0006`\u0014\u0000\u03d5"+ - "\u00d1\u0001\u0000\u0000\u0000\u03d6\u03d7\u0003:\u0015\u0000\u03d7\u03d8"+ - "\u0001\u0000\u0000\u0000\u03d8\u03d9\u0006a\u000b\u0000\u03d9\u00d3\u0001"+ - "\u0000\u0000\u0000\u03da\u03db\u0003<\u0016\u0000\u03db\u03dc\u0001\u0000"+ - "\u0000\u0000\u03dc\u03dd\u0006b\u000b\u0000\u03dd\u00d5\u0001\u0000\u0000"+ - "\u0000\u03de\u03df\u0003>\u0017\u0000\u03df\u03e0\u0001\u0000\u0000\u0000"+ - "\u03e0\u03e1\u0006c\u000b\u0000\u03e1\u00d7\u0001\u0000\u0000\u0000\u03e2"+ - "\u03e3\u0003N\u001f\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e5"+ - "\u0006d\u000e\u0000\u03e5\u03e6\u0006d\u000f\u0000\u03e6\u00d9\u0001\u0000"+ - "\u0000\u0000\u03e7\u03e8\u0003x4\u0000\u03e8\u03e9\u0001\u0000\u0000\u0000"+ - "\u03e9\u03ea\u0006e\u0015\u0000\u03ea\u00db\u0001\u0000\u0000\u0000\u03eb"+ - "\u03ec\u0003t2\u0000\u03ec\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0006"+ - "f\u0011\u0000\u03ee\u00dd\u0001\u0000\u0000\u0000\u03ef\u03f4\u0003R!"+ - "\u0000\u03f0\u03f4\u0003P \u0000\u03f1\u03f4\u0003`(\u0000\u03f2\u03f4"+ - "\u0003\u00aaM\u0000\u03f3\u03ef\u0001\u0000\u0000\u0000\u03f3\u03f0\u0001"+ - "\u0000\u0000\u0000\u03f3\u03f1\u0001\u0000\u0000\u0000\u03f3\u03f2\u0001"+ - "\u0000\u0000\u0000\u03f4\u00df\u0001\u0000\u0000\u0000\u03f5\u03f8\u0003"+ - "R!\u0000\u03f6\u03f8\u0003\u00aaM\u0000\u03f7\u03f5\u0001\u0000\u0000"+ - "\u0000\u03f7\u03f6\u0001\u0000\u0000\u0000\u03f8\u03fc\u0001\u0000\u0000"+ - "\u0000\u03f9\u03fb\u0003\u00deg\u0000\u03fa\u03f9\u0001\u0000\u0000\u0000"+ - "\u03fb\u03fe\u0001\u0000\u0000\u0000\u03fc\u03fa\u0001\u0000\u0000\u0000"+ - "\u03fc\u03fd\u0001\u0000\u0000\u0000\u03fd\u0409\u0001\u0000\u0000\u0000"+ - "\u03fe\u03fc\u0001\u0000\u0000\u0000\u03ff\u0402\u0003`(\u0000\u0400\u0402"+ - "\u0003Z%\u0000\u0401\u03ff\u0001\u0000\u0000\u0000\u0401\u0400\u0001\u0000"+ - "\u0000\u0000\u0402\u0404\u0001\u0000\u0000\u0000\u0403\u0405\u0003\u00de"+ - "g\u0000\u0404\u0403\u0001\u0000\u0000\u0000\u0405\u0406\u0001\u0000\u0000"+ - "\u0000\u0406\u0404\u0001\u0000\u0000\u0000\u0406\u0407\u0001\u0000\u0000"+ - "\u0000\u0407\u0409\u0001\u0000\u0000\u0000\u0408\u03f7\u0001\u0000\u0000"+ - "\u0000\u0408\u0401\u0001\u0000\u0000\u0000\u0409\u00e1\u0001\u0000\u0000"+ - "\u0000\u040a\u040d\u0003\u00e0h\u0000\u040b\u040d\u0003\u00b8T\u0000\u040c"+ - "\u040a\u0001\u0000\u0000\u0000\u040c\u040b\u0001\u0000\u0000\u0000\u040d"+ - "\u040e\u0001\u0000\u0000\u0000\u040e\u040c\u0001\u0000\u0000\u0000\u040e"+ - "\u040f\u0001\u0000\u0000\u0000\u040f\u00e3\u0001\u0000\u0000\u0000\u0410"+ - "\u0411\u0003:\u0015\u0000\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413"+ - "\u0006j\u000b\u0000\u0413\u00e5\u0001\u0000\u0000\u0000\u0414\u0415\u0003"+ - "<\u0016\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416\u0417\u0006k\u000b"+ - "\u0000\u0417\u00e7\u0001\u0000\u0000\u0000\u0418\u0419\u0003>\u0017\u0000"+ - 
"\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006l\u000b\u0000\u041b"+ - "\u00e9\u0001\u0000\u0000\u0000\u041c\u041d\u0003N\u001f\u0000\u041d\u041e"+ - "\u0001\u0000\u0000\u0000\u041e\u041f\u0006m\u000e\u0000\u041f\u0420\u0006"+ - "m\u000f\u0000\u0420\u00eb\u0001\u0000\u0000\u0000\u0421\u0422\u0003p0"+ - "\u0000\u0422\u0423\u0001\u0000\u0000\u0000\u0423\u0424\u0006n\u0012\u0000"+ - "\u0424\u00ed\u0001\u0000\u0000\u0000\u0425\u0426\u0003t2\u0000\u0426\u0427"+ - "\u0001\u0000\u0000\u0000\u0427\u0428\u0006o\u0011\u0000\u0428\u00ef\u0001"+ - "\u0000\u0000\u0000\u0429\u042a\u0003x4\u0000\u042a\u042b\u0001\u0000\u0000"+ - "\u0000\u042b\u042c\u0006p\u0015\u0000\u042c\u00f1\u0001\u0000\u0000\u0000"+ - "\u042d\u042e\u0005a\u0000\u0000\u042e\u042f\u0005s\u0000\u0000\u042f\u00f3"+ - "\u0001\u0000\u0000\u0000\u0430\u0431\u0003\u00e2i\u0000\u0431\u0432\u0001"+ - "\u0000\u0000\u0000\u0432\u0433\u0006r\u0016\u0000\u0433\u00f5\u0001\u0000"+ - "\u0000\u0000\u0434\u0435\u0003:\u0015\u0000\u0435\u0436\u0001\u0000\u0000"+ - "\u0000\u0436\u0437\u0006s\u000b\u0000\u0437\u00f7\u0001\u0000\u0000\u0000"+ - "\u0438\u0439\u0003<\u0016\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a"+ - "\u043b\u0006t\u000b\u0000\u043b\u00f9\u0001\u0000\u0000\u0000\u043c\u043d"+ - "\u0003>\u0017\u0000\u043d\u043e\u0001\u0000\u0000\u0000\u043e\u043f\u0006"+ - "u\u000b\u0000\u043f\u00fb\u0001\u0000\u0000\u0000\u0440\u0441\u0003N\u001f"+ - "\u0000\u0441\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006v\u000e\u0000"+ - "\u0443\u0444\u0006v\u000f\u0000\u0444\u00fd\u0001\u0000\u0000\u0000\u0445"+ - "\u0446\u0003\u00b2Q\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448"+ - "\u0006w\f\u0000\u0448\u0449\u0006w\u0017\u0000\u0449\u00ff\u0001\u0000"+ - "\u0000\u0000\u044a\u044b\u0005o\u0000\u0000\u044b\u044c\u0005n\u0000\u0000"+ - "\u044c\u044d\u0001\u0000\u0000\u0000\u044d\u044e\u0006x\u0018\u0000\u044e"+ - "\u0101\u0001\u0000\u0000\u0000\u044f\u0450\u0005w\u0000\u0000\u0450\u0451"+ - "\u0005i\u0000\u0000\u0451\u0452\u0005t\u0000\u0000\u0452\u0453\u0005h"+ - "\u0000\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006y\u0018"+ - "\u0000\u0455\u0103\u0001\u0000\u0000\u0000\u0456\u0457\b\f\u0000\u0000"+ - "\u0457\u0105\u0001\u0000\u0000\u0000\u0458\u045a\u0003\u0104z\u0000\u0459"+ - "\u0458\u0001\u0000\u0000\u0000\u045a\u045b\u0001\u0000\u0000\u0000\u045b"+ - "\u0459\u0001\u0000\u0000\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c"+ - "\u045d\u0001\u0000\u0000\u0000\u045d\u045e\u0003\u0168\u00ac\u0000\u045e"+ - "\u0460\u0001\u0000\u0000\u0000\u045f\u0459\u0001\u0000\u0000\u0000\u045f"+ - "\u0460\u0001\u0000\u0000\u0000\u0460\u0462\u0001\u0000\u0000\u0000\u0461"+ - "\u0463\u0003\u0104z\u0000\u0462\u0461\u0001\u0000\u0000\u0000\u0463\u0464"+ - "\u0001\u0000\u0000\u0000\u0464\u0462\u0001\u0000\u0000\u0000\u0464\u0465"+ - "\u0001\u0000\u0000\u0000\u0465\u0107\u0001\u0000\u0000\u0000\u0466\u0467"+ - "\u0003\u00baU\u0000\u0467\u0468\u0001\u0000\u0000\u0000\u0468\u0469\u0006"+ - "|\u0019\u0000\u0469\u0109\u0001\u0000\u0000\u0000\u046a\u046b\u0003\u0106"+ - "{\u0000\u046b\u046c\u0001\u0000\u0000\u0000\u046c\u046d\u0006}\u001a\u0000"+ - "\u046d\u010b\u0001\u0000\u0000\u0000\u046e\u046f\u0003:\u0015\u0000\u046f"+ - "\u0470\u0001\u0000\u0000\u0000\u0470\u0471\u0006~\u000b\u0000\u0471\u010d"+ - "\u0001\u0000\u0000\u0000\u0472\u0473\u0003<\u0016\u0000\u0473\u0474\u0001"+ - "\u0000\u0000\u0000\u0474\u0475\u0006\u007f\u000b\u0000\u0475\u010f\u0001"+ - "\u0000\u0000\u0000\u0476\u0477\u0003>\u0017\u0000\u0477\u0478\u0001\u0000"+ - 
"\u0000\u0000\u0478\u0479\u0006\u0080\u000b\u0000\u0479\u0111\u0001\u0000"+ - "\u0000\u0000\u047a\u047b\u0003N\u001f\u0000\u047b\u047c\u0001\u0000\u0000"+ - "\u0000\u047c\u047d\u0006\u0081\u000e\u0000\u047d\u047e\u0006\u0081\u000f"+ - "\u0000\u047e\u047f\u0006\u0081\u000f\u0000\u047f\u0113\u0001\u0000\u0000"+ - "\u0000\u0480\u0481\u0003p0\u0000\u0481\u0482\u0001\u0000\u0000\u0000\u0482"+ - "\u0483\u0006\u0082\u0012\u0000\u0483\u0115\u0001\u0000\u0000\u0000\u0484"+ - "\u0485\u0003t2\u0000\u0485\u0486\u0001\u0000\u0000\u0000\u0486\u0487\u0006"+ - "\u0083\u0011\u0000\u0487\u0117\u0001\u0000\u0000\u0000\u0488\u0489\u0003"+ - "x4\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a\u048b\u0006\u0084\u0015"+ - "\u0000\u048b\u0119\u0001\u0000\u0000\u0000\u048c\u048d\u0003\u0102y\u0000"+ - "\u048d\u048e\u0001\u0000\u0000\u0000\u048e\u048f\u0006\u0085\u001b\u0000"+ - "\u048f\u011b\u0001\u0000\u0000\u0000\u0490\u0491\u0003\u00e2i\u0000\u0491"+ - "\u0492\u0001\u0000\u0000\u0000\u0492\u0493\u0006\u0086\u0016\u0000\u0493"+ - "\u011d\u0001\u0000\u0000\u0000\u0494\u0495\u0003\u00baU\u0000\u0495\u0496"+ - "\u0001\u0000\u0000\u0000\u0496\u0497\u0006\u0087\u0019\u0000\u0497\u011f"+ - "\u0001\u0000\u0000\u0000\u0498\u0499\u0003:\u0015\u0000\u0499\u049a\u0001"+ - "\u0000\u0000\u0000\u049a\u049b\u0006\u0088\u000b\u0000\u049b\u0121\u0001"+ - "\u0000\u0000\u0000\u049c\u049d\u0003<\u0016\u0000\u049d\u049e\u0001\u0000"+ - "\u0000\u0000\u049e\u049f\u0006\u0089\u000b\u0000\u049f\u0123\u0001\u0000"+ - "\u0000\u0000\u04a0\u04a1\u0003>\u0017\u0000\u04a1\u04a2\u0001\u0000\u0000"+ - "\u0000\u04a2\u04a3\u0006\u008a\u000b\u0000\u04a3\u0125\u0001\u0000\u0000"+ - "\u0000\u04a4\u04a5\u0003N\u001f\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000"+ - "\u04a6\u04a7\u0006\u008b\u000e\u0000\u04a7\u04a8\u0006\u008b\u000f\u0000"+ - "\u04a8\u0127\u0001\u0000\u0000\u0000\u04a9\u04aa\u0003t2\u0000\u04aa\u04ab"+ - "\u0001\u0000\u0000\u0000\u04ab\u04ac\u0006\u008c\u0011\u0000\u04ac\u0129"+ - "\u0001\u0000\u0000\u0000\u04ad\u04ae\u0003x4\u0000\u04ae\u04af\u0001\u0000"+ - "\u0000\u0000\u04af\u04b0\u0006\u008d\u0015\u0000\u04b0\u012b\u0001\u0000"+ - "\u0000\u0000\u04b1\u04b2\u0003\u0100x\u0000\u04b2\u04b3\u0001\u0000\u0000"+ - "\u0000\u04b3\u04b4\u0006\u008e\u001c\u0000\u04b4\u04b5\u0006\u008e\u001d"+ - "\u0000\u04b5\u012d\u0001\u0000\u0000\u0000\u04b6\u04b7\u0003B\u0019\u0000"+ - "\u04b7\u04b8\u0001\u0000\u0000\u0000\u04b8\u04b9\u0006\u008f\u0014\u0000"+ - "\u04b9\u012f\u0001\u0000\u0000\u0000\u04ba\u04bb\u0003:\u0015\u0000\u04bb"+ - "\u04bc\u0001\u0000\u0000\u0000\u04bc\u04bd\u0006\u0090\u000b\u0000\u04bd"+ - "\u0131\u0001\u0000\u0000\u0000\u04be\u04bf\u0003<\u0016\u0000\u04bf\u04c0"+ - "\u0001\u0000\u0000\u0000\u04c0\u04c1\u0006\u0091\u000b\u0000\u04c1\u0133"+ - "\u0001\u0000\u0000\u0000\u04c2\u04c3\u0003>\u0017\u0000\u04c3\u04c4\u0001"+ - "\u0000\u0000\u0000\u04c4\u04c5\u0006\u0092\u000b\u0000\u04c5\u0135\u0001"+ - "\u0000\u0000\u0000\u04c6\u04c7\u0003N\u001f\u0000\u04c7\u04c8\u0001\u0000"+ - "\u0000\u0000\u04c8\u04c9\u0006\u0093\u000e\u0000\u04c9\u04ca\u0006\u0093"+ - "\u000f\u0000\u04ca\u04cb\u0006\u0093\u000f\u0000\u04cb\u0137\u0001\u0000"+ - "\u0000\u0000\u04cc\u04cd\u0003t2\u0000\u04cd\u04ce\u0001\u0000\u0000\u0000"+ - "\u04ce\u04cf\u0006\u0094\u0011\u0000\u04cf\u0139\u0001\u0000\u0000\u0000"+ - "\u04d0\u04d1\u0003x4\u0000\u04d1\u04d2\u0001\u0000\u0000\u0000\u04d2\u04d3"+ - "\u0006\u0095\u0015\u0000\u04d3\u013b\u0001\u0000\u0000\u0000\u04d4\u04d5"+ - "\u0003\u00e2i\u0000\u04d5\u04d6\u0001\u0000\u0000\u0000\u04d6\u04d7\u0006"+ - 
"\u0096\u0016\u0000\u04d7\u013d\u0001\u0000\u0000\u0000\u04d8\u04d9\u0003"+ - ":\u0015\u0000\u04d9\u04da\u0001\u0000\u0000\u0000\u04da\u04db\u0006\u0097"+ - "\u000b\u0000\u04db\u013f\u0001\u0000\u0000\u0000\u04dc\u04dd\u0003<\u0016"+ - "\u0000\u04dd\u04de\u0001\u0000\u0000\u0000\u04de\u04df\u0006\u0098\u000b"+ - "\u0000\u04df\u0141\u0001\u0000\u0000\u0000\u04e0\u04e1\u0003>\u0017\u0000"+ - "\u04e1\u04e2\u0001\u0000\u0000\u0000\u04e2\u04e3\u0006\u0099\u000b\u0000"+ - "\u04e3\u0143\u0001\u0000\u0000\u0000\u04e4\u04e5\u0003N\u001f\u0000\u04e5"+ - "\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u009a\u000e\u0000\u04e7"+ - "\u04e8\u0006\u009a\u000f\u0000\u04e8\u0145\u0001\u0000\u0000\u0000\u04e9"+ - "\u04ea\u0003x4\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000\u04eb\u04ec\u0006"+ - "\u009b\u0015\u0000\u04ec\u0147\u0001\u0000\u0000\u0000\u04ed\u04ee\u0003"+ - "\u00baU\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef\u04f0\u0006\u009c"+ - "\u0019\u0000\u04f0\u0149\u0001\u0000\u0000\u0000\u04f1\u04f2\u0003\u00b6"+ - "S\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4\u0006\u009d\u001e"+ - "\u0000\u04f4\u014b\u0001\u0000\u0000\u0000\u04f5\u04f6\u0003:\u0015\u0000"+ - "\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006\u009e\u000b\u0000"+ - "\u04f8\u014d\u0001\u0000\u0000\u0000\u04f9\u04fa\u0003<\u0016\u0000\u04fa"+ - "\u04fb\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u009f\u000b\u0000\u04fc"+ - "\u014f\u0001\u0000\u0000\u0000\u04fd\u04fe\u0003>\u0017\u0000\u04fe\u04ff"+ - "\u0001\u0000\u0000\u0000\u04ff\u0500\u0006\u00a0\u000b\u0000\u0500\u0151"+ - "\u0001\u0000\u0000\u0000\u0501\u0502\u0003N\u001f\u0000\u0502\u0503\u0001"+ - "\u0000\u0000\u0000\u0503\u0504\u0006\u00a1\u000e\u0000\u0504\u0505\u0006"+ - "\u00a1\u000f\u0000\u0505\u0153\u0001\u0000\u0000\u0000\u0506\u0507\u0005"+ - "i\u0000\u0000\u0507\u0508\u0005n\u0000\u0000\u0508\u0509\u0005f\u0000"+ - "\u0000\u0509\u050a\u0005o\u0000\u0000\u050a\u0155\u0001\u0000\u0000\u0000"+ - "\u050b\u050c\u0003:\u0015\u0000\u050c\u050d\u0001\u0000\u0000\u0000\u050d"+ - "\u050e\u0006\u00a3\u000b\u0000\u050e\u0157\u0001\u0000\u0000\u0000\u050f"+ - "\u0510\u0003<\u0016\u0000\u0510\u0511\u0001\u0000\u0000\u0000\u0511\u0512"+ - "\u0006\u00a4\u000b\u0000\u0512\u0159\u0001\u0000\u0000\u0000\u0513\u0514"+ - "\u0003>\u0017\u0000\u0514\u0515\u0001\u0000\u0000\u0000\u0515\u0516\u0006"+ - "\u00a5\u000b\u0000\u0516\u015b\u0001\u0000\u0000\u0000\u0517\u0518\u0003"+ - "N\u001f\u0000\u0518\u0519\u0001\u0000\u0000\u0000\u0519\u051a\u0006\u00a6"+ - "\u000e\u0000\u051a\u051b\u0006\u00a6\u000f\u0000\u051b\u015d\u0001\u0000"+ - "\u0000\u0000\u051c\u051d\u0005f\u0000\u0000\u051d\u051e\u0005u\u0000\u0000"+ - "\u051e\u051f\u0005n\u0000\u0000\u051f\u0520\u0005c\u0000\u0000\u0520\u0521"+ - "\u0005t\u0000\u0000\u0521\u0522\u0005i\u0000\u0000\u0522\u0523\u0005o"+ - "\u0000\u0000\u0523\u0524\u0005n\u0000\u0000\u0524\u0525\u0005s\u0000\u0000"+ - "\u0525\u015f\u0001\u0000\u0000\u0000\u0526\u0527\u0003:\u0015\u0000\u0527"+ - "\u0528\u0001\u0000\u0000\u0000\u0528\u0529\u0006\u00a8\u000b\u0000\u0529"+ - "\u0161\u0001\u0000\u0000\u0000\u052a\u052b\u0003<\u0016\u0000\u052b\u052c"+ - "\u0001\u0000\u0000\u0000\u052c\u052d\u0006\u00a9\u000b\u0000\u052d\u0163"+ - "\u0001\u0000\u0000\u0000\u052e\u052f\u0003>\u0017\u0000\u052f\u0530\u0001"+ - "\u0000\u0000\u0000\u0530\u0531\u0006\u00aa\u000b\u0000\u0531\u0165\u0001"+ - "\u0000\u0000\u0000\u0532\u0533\u0003\u00b4R\u0000\u0533\u0534\u0001\u0000"+ - "\u0000\u0000\u0534\u0535\u0006\u00ab\u0010\u0000\u0535\u0536\u0006\u00ab"+ - 
"\u000f\u0000\u0536\u0167\u0001\u0000\u0000\u0000\u0537\u0538\u0005:\u0000"+ - "\u0000\u0538\u0169\u0001\u0000\u0000\u0000\u0539\u053f\u0003Z%\u0000\u053a"+ - "\u053f\u0003P \u0000\u053b\u053f\u0003x4\u0000\u053c\u053f\u0003R!\u0000"+ - "\u053d\u053f\u0003`(\u0000\u053e\u0539\u0001\u0000\u0000\u0000\u053e\u053a"+ - "\u0001\u0000\u0000\u0000\u053e\u053b\u0001\u0000\u0000\u0000\u053e\u053c"+ - "\u0001\u0000\u0000\u0000\u053e\u053d\u0001\u0000\u0000\u0000\u053f\u0540"+ - "\u0001\u0000\u0000\u0000\u0540\u053e\u0001\u0000\u0000\u0000\u0540\u0541"+ - "\u0001\u0000\u0000\u0000\u0541\u016b\u0001\u0000\u0000\u0000\u0542\u0543"+ - "\u0003:\u0015\u0000\u0543\u0544\u0001\u0000\u0000\u0000\u0544\u0545\u0006"+ - "\u00ae\u000b\u0000\u0545\u016d\u0001\u0000\u0000\u0000\u0546\u0547\u0003"+ - "<\u0016\u0000\u0547\u0548\u0001\u0000\u0000\u0000\u0548\u0549\u0006\u00af"+ - "\u000b\u0000\u0549\u016f\u0001\u0000\u0000\u0000\u054a\u054b\u0003>\u0017"+ - "\u0000\u054b\u054c\u0001\u0000\u0000\u0000\u054c\u054d\u0006\u00b0\u000b"+ - "\u0000\u054d\u0171\u0001\u0000\u0000\u0000\u054e\u054f\u0003N\u001f\u0000"+ - "\u054f\u0550\u0001\u0000\u0000\u0000\u0550\u0551\u0006\u00b1\u000e\u0000"+ - "\u0551\u0552\u0006\u00b1\u000f\u0000\u0552\u0173\u0001\u0000\u0000\u0000"+ - "\u0553\u0554\u0003B\u0019\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555"+ - "\u0556\u0006\u00b2\u0014\u0000\u0556\u0557\u0006\u00b2\u000f\u0000\u0557"+ - "\u0558\u0006\u00b2\u001f\u0000\u0558\u0175\u0001\u0000\u0000\u0000\u0559"+ - "\u055a\u0003:\u0015\u0000\u055a\u055b\u0001\u0000\u0000\u0000\u055b\u055c"+ - "\u0006\u00b3\u000b\u0000\u055c\u0177\u0001\u0000\u0000\u0000\u055d\u055e"+ - "\u0003<\u0016\u0000\u055e\u055f\u0001\u0000\u0000\u0000\u055f\u0560\u0006"+ - "\u00b4\u000b\u0000\u0560\u0179\u0001\u0000\u0000\u0000\u0561\u0562\u0003"+ - ">\u0017\u0000\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006\u00b5"+ - "\u000b\u0000\u0564\u017b\u0001\u0000\u0000\u0000\u0565\u0566\u0003t2\u0000"+ - "\u0566\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00b6\u0011\u0000"+ - "\u0568\u0569\u0006\u00b6\u000f\u0000\u0569\u056a\u0006\u00b6\u0007\u0000"+ - "\u056a\u017d\u0001\u0000\u0000\u0000\u056b\u056c\u0003:\u0015\u0000\u056c"+ - "\u056d\u0001\u0000\u0000\u0000\u056d\u056e\u0006\u00b7\u000b\u0000\u056e"+ - "\u017f\u0001\u0000\u0000\u0000\u056f\u0570\u0003<\u0016\u0000\u0570\u0571"+ - "\u0001\u0000\u0000\u0000\u0571\u0572\u0006\u00b8\u000b\u0000\u0572\u0181"+ - "\u0001\u0000\u0000\u0000\u0573\u0574\u0003>\u0017\u0000\u0574\u0575\u0001"+ - "\u0000\u0000\u0000\u0575\u0576\u0006\u00b9\u000b\u0000\u0576\u0183\u0001"+ - "\u0000\u0000\u0000\u0577\u0578\u0003\u00baU\u0000\u0578\u0579\u0001\u0000"+ - "\u0000\u0000\u0579\u057a\u0006\u00ba\u000f\u0000\u057a\u057b\u0006\u00ba"+ - "\u0000\u0000\u057b\u057c\u0006\u00ba\u0019\u0000\u057c\u0185\u0001\u0000"+ - "\u0000\u0000\u057d\u057e\u0003\u00b6S\u0000\u057e\u057f\u0001\u0000\u0000"+ - "\u0000\u057f\u0580\u0006\u00bb\u000f\u0000\u0580\u0581\u0006\u00bb\u0000"+ - "\u0000\u0581\u0582\u0006\u00bb\u001e\u0000\u0582\u0187\u0001\u0000\u0000"+ - "\u0000\u0583\u0584\u0003j-\u0000\u0584\u0585\u0001\u0000\u0000\u0000\u0585"+ - "\u0586\u0006\u00bc\u000f\u0000\u0586\u0587\u0006\u00bc\u0000\u0000\u0587"+ - "\u0588\u0006\u00bc \u0000\u0588\u0189\u0001\u0000\u0000\u0000\u0589\u058a"+ - "\u0003N\u001f\u0000\u058a\u058b\u0001\u0000\u0000\u0000\u058b\u058c\u0006"+ - "\u00bd\u000e\u0000\u058c\u058d\u0006\u00bd\u000f\u0000\u058d\u018b\u0001"+ - "\u0000\u0000\u0000A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b"+ - 
"\t\n\u000b\f\r\u000e\u000f\u0238\u0242\u0246\u0249\u0252\u0254\u025f\u0266"+ - "\u026b\u0292\u0297\u02a0\u02a7\u02ac\u02ae\u02b9\u02c1\u02c4\u02c6\u02cb"+ - "\u02d0\u02d6\u02dd\u02e2\u02e8\u02eb\u02f3\u02f7\u0373\u037a\u037c\u038c"+ - "\u0391\u0396\u0398\u039e\u03f3\u03f7\u03fc\u0401\u0406\u0408\u040c\u040e"+ - "\u045b\u045f\u0464\u053e\u0540!\u0005\u0002\u0000\u0005\u0004\u0000\u0005"+ - "\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\f"+ - "\u0000\u0005\u000e\u0000\u0005\n\u0000\u0005\u0005\u0000\u0005\u000b\u0000"+ - "\u0000\u0001\u0000\u0007E\u0000\u0005\u0000\u0000\u0007\u001d\u0000\u0004"+ - "\u0000\u0000\u0007F\u0000\u0007&\u0000\u0007$\u0000\u0007\u001e\u0000"+ - "\u0007\u0019\u0000\u0007(\u0000\u0007P\u0000\u0005\r\u0000\u0005\u0007"+ - "\u0000\u0007H\u0000\u0007Z\u0000\u0007Y\u0000\u0007X\u0000\u0005\t\u0000"+ - "\u0007G\u0000\u0005\u000f\u0000\u0007!\u0000"; + "\u0300\u0001\u0000\u0000\u0000\u02f7\u02f9\u0003x4\u0000\u02f8\u02fa\u0003"+ + "P \u0000\u02f9\u02f8\u0001\u0000\u0000\u0000\u02fa\u02fb\u0001\u0000\u0000"+ + "\u0000\u02fb\u02f9\u0001\u0000\u0000\u0000\u02fb\u02fc\u0001\u0000\u0000"+ + "\u0000\u02fc\u02fd\u0001\u0000\u0000\u0000\u02fd\u02fe\u0003X$\u0000\u02fe"+ + "\u0300\u0001\u0000\u0000\u0000\u02ff\u02d6\u0001\u0000\u0000\u0000\u02ff"+ + "\u02e1\u0001\u0000\u0000\u0000\u02ff\u02e8\u0001\u0000\u0000\u0000\u02ff"+ + "\u02f7\u0001\u0000\u0000\u0000\u0300i\u0001\u0000\u0000\u0000\u0301\u0302"+ + "\u0005b\u0000\u0000\u0302\u0303\u0005y\u0000\u0000\u0303k\u0001\u0000"+ + "\u0000\u0000\u0304\u0305\u0005a\u0000\u0000\u0305\u0306\u0005n\u0000\u0000"+ + "\u0306\u0307\u0005d\u0000\u0000\u0307m\u0001\u0000\u0000\u0000\u0308\u0309"+ + "\u0005a\u0000\u0000\u0309\u030a\u0005s\u0000\u0000\u030a\u030b\u0005c"+ + "\u0000\u0000\u030bo\u0001\u0000\u0000\u0000\u030c\u030d\u0005=\u0000\u0000"+ + "\u030dq\u0001\u0000\u0000\u0000\u030e\u030f\u0005:\u0000\u0000\u030f\u0310"+ + "\u0005:\u0000\u0000\u0310s\u0001\u0000\u0000\u0000\u0311\u0312\u0005,"+ + "\u0000\u0000\u0312u\u0001\u0000\u0000\u0000\u0313\u0314\u0005d\u0000\u0000"+ + "\u0314\u0315\u0005e\u0000\u0000\u0315\u0316\u0005s\u0000\u0000\u0316\u0317"+ + "\u0005c\u0000\u0000\u0317w\u0001\u0000\u0000\u0000\u0318\u0319\u0005."+ + "\u0000\u0000\u0319y\u0001\u0000\u0000\u0000\u031a\u031b\u0005f\u0000\u0000"+ + "\u031b\u031c\u0005a\u0000\u0000\u031c\u031d\u0005l\u0000\u0000\u031d\u031e"+ + "\u0005s\u0000\u0000\u031e\u031f\u0005e\u0000\u0000\u031f{\u0001\u0000"+ + "\u0000\u0000\u0320\u0321\u0005f\u0000\u0000\u0321\u0322\u0005i\u0000\u0000"+ + "\u0322\u0323\u0005r\u0000\u0000\u0323\u0324\u0005s\u0000\u0000\u0324\u0325"+ + "\u0005t\u0000\u0000\u0325}\u0001\u0000\u0000\u0000\u0326\u0327\u0005l"+ + "\u0000\u0000\u0327\u0328\u0005a\u0000\u0000\u0328\u0329\u0005s\u0000\u0000"+ + "\u0329\u032a\u0005t\u0000\u0000\u032a\u007f\u0001\u0000\u0000\u0000\u032b"+ + "\u032c\u0005(\u0000\u0000\u032c\u0081\u0001\u0000\u0000\u0000\u032d\u032e"+ + "\u0005i\u0000\u0000\u032e\u032f\u0005n\u0000\u0000\u032f\u0083\u0001\u0000"+ + "\u0000\u0000\u0330\u0331\u0005i\u0000\u0000\u0331\u0332\u0005s\u0000\u0000"+ + "\u0332\u0085\u0001\u0000\u0000\u0000\u0333\u0334\u0005l\u0000\u0000\u0334"+ + "\u0335\u0005i\u0000\u0000\u0335\u0336\u0005k\u0000\u0000\u0336\u0337\u0005"+ + "e\u0000\u0000\u0337\u0087\u0001\u0000\u0000\u0000\u0338\u0339\u0005n\u0000"+ + "\u0000\u0339\u033a\u0005o\u0000\u0000\u033a\u033b\u0005t\u0000\u0000\u033b"+ + "\u0089\u0001\u0000\u0000\u0000\u033c\u033d\u0005n\u0000\u0000\u033d\u033e"+ + 
"\u0005u\u0000\u0000\u033e\u033f\u0005l\u0000\u0000\u033f\u0340\u0005l"+ + "\u0000\u0000\u0340\u008b\u0001\u0000\u0000\u0000\u0341\u0342\u0005n\u0000"+ + "\u0000\u0342\u0343\u0005u\u0000\u0000\u0343\u0344\u0005l\u0000\u0000\u0344"+ + "\u0345\u0005l\u0000\u0000\u0345\u0346\u0005s\u0000\u0000\u0346\u008d\u0001"+ + "\u0000\u0000\u0000\u0347\u0348\u0005o\u0000\u0000\u0348\u0349\u0005r\u0000"+ + "\u0000\u0349\u008f\u0001\u0000\u0000\u0000\u034a\u034b\u0005?\u0000\u0000"+ + "\u034b\u0091\u0001\u0000\u0000\u0000\u034c\u034d\u0005r\u0000\u0000\u034d"+ + "\u034e\u0005l\u0000\u0000\u034e\u034f\u0005i\u0000\u0000\u034f\u0350\u0005"+ + "k\u0000\u0000\u0350\u0351\u0005e\u0000\u0000\u0351\u0093\u0001\u0000\u0000"+ + "\u0000\u0352\u0353\u0005)\u0000\u0000\u0353\u0095\u0001\u0000\u0000\u0000"+ + "\u0354\u0355\u0005t\u0000\u0000\u0355\u0356\u0005r\u0000\u0000\u0356\u0357"+ + "\u0005u\u0000\u0000\u0357\u0358\u0005e\u0000\u0000\u0358\u0097\u0001\u0000"+ + "\u0000\u0000\u0359\u035a\u0005=\u0000\u0000\u035a\u035b\u0005=\u0000\u0000"+ + "\u035b\u0099\u0001\u0000\u0000\u0000\u035c\u035d\u0005=\u0000\u0000\u035d"+ + "\u035e\u0005~\u0000\u0000\u035e\u009b\u0001\u0000\u0000\u0000\u035f\u0360"+ + "\u0005!\u0000\u0000\u0360\u0361\u0005=\u0000\u0000\u0361\u009d\u0001\u0000"+ + "\u0000\u0000\u0362\u0363\u0005<\u0000\u0000\u0363\u009f\u0001\u0000\u0000"+ + "\u0000\u0364\u0365\u0005<\u0000\u0000\u0365\u0366\u0005=\u0000\u0000\u0366"+ + "\u00a1\u0001\u0000\u0000\u0000\u0367\u0368\u0005>\u0000\u0000\u0368\u00a3"+ + "\u0001\u0000\u0000\u0000\u0369\u036a\u0005>\u0000\u0000\u036a\u036b\u0005"+ + "=\u0000\u0000\u036b\u00a5\u0001\u0000\u0000\u0000\u036c\u036d\u0005+\u0000"+ + "\u0000\u036d\u00a7\u0001\u0000\u0000\u0000\u036e\u036f\u0005-\u0000\u0000"+ + "\u036f\u00a9\u0001\u0000\u0000\u0000\u0370\u0371\u0005*\u0000\u0000\u0371"+ + "\u00ab\u0001\u0000\u0000\u0000\u0372\u0373\u0005/\u0000\u0000\u0373\u00ad"+ + "\u0001\u0000\u0000\u0000\u0374\u0375\u0005%\u0000\u0000\u0375\u00af\u0001"+ + "\u0000\u0000\u0000\u0376\u0377\u0003\u0090@\u0000\u0377\u037b\u0003R!"+ + "\u0000\u0378\u037a\u0003b)\u0000\u0379\u0378\u0001\u0000\u0000\u0000\u037a"+ + "\u037d\u0001\u0000\u0000\u0000\u037b\u0379\u0001\u0000\u0000\u0000\u037b"+ + "\u037c\u0001\u0000\u0000\u0000\u037c\u0385\u0001\u0000\u0000\u0000\u037d"+ + "\u037b\u0001\u0000\u0000\u0000\u037e\u0380\u0003\u0090@\u0000\u037f\u0381"+ + "\u0003P \u0000\u0380\u037f\u0001\u0000\u0000\u0000\u0381\u0382\u0001\u0000"+ + "\u0000\u0000\u0382\u0380\u0001\u0000\u0000\u0000\u0382\u0383\u0001\u0000"+ + "\u0000\u0000\u0383\u0385\u0001\u0000\u0000\u0000\u0384\u0376\u0001\u0000"+ + "\u0000\u0000\u0384\u037e\u0001\u0000\u0000\u0000\u0385\u00b1\u0001\u0000"+ + "\u0000\u0000\u0386\u0387\u0005[\u0000\u0000\u0387\u0388\u0001\u0000\u0000"+ + "\u0000\u0388\u0389\u0006Q\u0000\u0000\u0389\u038a\u0006Q\u0000\u0000\u038a"+ + "\u00b3\u0001\u0000\u0000\u0000\u038b\u038c\u0005]\u0000\u0000\u038c\u038d"+ + "\u0001\u0000\u0000\u0000\u038d\u038e\u0006R\u000f\u0000\u038e\u038f\u0006"+ + "R\u000f\u0000\u038f\u00b5\u0001\u0000\u0000\u0000\u0390\u0394\u0003R!"+ + "\u0000\u0391\u0393\u0003b)\u0000\u0392\u0391\u0001\u0000\u0000\u0000\u0393"+ + "\u0396\u0001\u0000\u0000\u0000\u0394\u0392\u0001\u0000\u0000\u0000\u0394"+ + "\u0395\u0001\u0000\u0000\u0000\u0395\u03a1\u0001\u0000\u0000\u0000\u0396"+ + "\u0394\u0001\u0000\u0000\u0000\u0397\u039a\u0003`(\u0000\u0398\u039a\u0003"+ + "Z%\u0000\u0399\u0397\u0001\u0000\u0000\u0000\u0399\u0398\u0001\u0000\u0000"+ + "\u0000\u039a\u039c\u0001\u0000\u0000\u0000\u039b\u039d\u0003b)\u0000\u039c"+ + 
"\u039b\u0001\u0000\u0000\u0000\u039d\u039e\u0001\u0000\u0000\u0000\u039e"+ + "\u039c\u0001\u0000\u0000\u0000\u039e\u039f\u0001\u0000\u0000\u0000\u039f"+ + "\u03a1\u0001\u0000\u0000\u0000\u03a0\u0390\u0001\u0000\u0000\u0000\u03a0"+ + "\u0399\u0001\u0000\u0000\u0000\u03a1\u00b7\u0001\u0000\u0000\u0000\u03a2"+ + "\u03a4\u0003\\&\u0000\u03a3\u03a5\u0003^\'\u0000\u03a4\u03a3\u0001\u0000"+ + "\u0000\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a4\u0001\u0000"+ + "\u0000\u0000\u03a6\u03a7\u0001\u0000\u0000\u0000\u03a7\u03a8\u0001\u0000"+ + "\u0000\u0000\u03a8\u03a9\u0003\\&\u0000\u03a9\u00b9\u0001\u0000\u0000"+ + "\u0000\u03aa\u03ab\u0003\u00b8T\u0000\u03ab\u00bb\u0001\u0000\u0000\u0000"+ + "\u03ac\u03ad\u0003:\u0015\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000\u03ae"+ + "\u03af\u0006V\u000b\u0000\u03af\u00bd\u0001\u0000\u0000\u0000\u03b0\u03b1"+ + "\u0003<\u0016\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006"+ + "W\u000b\u0000\u03b3\u00bf\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003>\u0017"+ + "\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006X\u000b\u0000"+ + "\u03b7\u00c1\u0001\u0000\u0000\u0000\u03b8\u03b9\u0003N\u001f\u0000\u03b9"+ + "\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006Y\u000e\u0000\u03bb\u03bc"+ + "\u0006Y\u000f\u0000\u03bc\u00c3\u0001\u0000\u0000\u0000\u03bd\u03be\u0003"+ + "\u00b2Q\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf\u03c0\u0006Z\f"+ + "\u0000\u03c0\u00c5\u0001\u0000\u0000\u0000\u03c1\u03c2\u0003\u00b4R\u0000"+ + "\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006[\u0010\u0000\u03c4"+ + "\u00c7\u0001\u0000\u0000\u0000\u03c5\u03c6\u0003\u016c\u00ae\u0000\u03c6"+ + "\u03c7\u0001\u0000\u0000\u0000\u03c7\u03c8\u0006\\\u0011\u0000\u03c8\u00c9"+ + "\u0001\u0000\u0000\u0000\u03c9\u03ca\u0003t2\u0000\u03ca\u03cb\u0001\u0000"+ + "\u0000\u0000\u03cb\u03cc\u0006]\u0012\u0000\u03cc\u00cb\u0001\u0000\u0000"+ + "\u0000\u03cd\u03ce\u0003p0\u0000\u03ce\u03cf\u0001\u0000\u0000\u0000\u03cf"+ + "\u03d0\u0006^\u0013\u0000\u03d0\u00cd\u0001\u0000\u0000\u0000\u03d1\u03d2"+ + "\u0005m\u0000\u0000\u03d2\u03d3\u0005e\u0000\u0000\u03d3\u03d4\u0005t"+ + "\u0000\u0000\u03d4\u03d5\u0005a\u0000\u0000\u03d5\u03d6\u0005d\u0000\u0000"+ + "\u03d6\u03d7\u0005a\u0000\u0000\u03d7\u03d8\u0005t\u0000\u0000\u03d8\u03d9"+ + "\u0005a\u0000\u0000\u03d9\u00cf\u0001\u0000\u0000\u0000\u03da\u03db\u0003"+ + "B\u0019\u0000\u03db\u03dc\u0001\u0000\u0000\u0000\u03dc\u03dd\u0006`\u0014"+ + "\u0000\u03dd\u00d1\u0001\u0000\u0000\u0000\u03de\u03df\u0003d*\u0000\u03df"+ + "\u03e0\u0001\u0000\u0000\u0000\u03e0\u03e1\u0006a\u0015\u0000\u03e1\u00d3"+ + "\u0001\u0000\u0000\u0000\u03e2\u03e3\u0003:\u0015\u0000\u03e3\u03e4\u0001"+ + "\u0000\u0000\u0000\u03e4\u03e5\u0006b\u000b\u0000\u03e5\u00d5\u0001\u0000"+ + "\u0000\u0000\u03e6\u03e7\u0003<\u0016\u0000\u03e7\u03e8\u0001\u0000\u0000"+ + "\u0000\u03e8\u03e9\u0006c\u000b\u0000\u03e9\u00d7\u0001\u0000\u0000\u0000"+ + "\u03ea\u03eb\u0003>\u0017\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec"+ + "\u03ed\u0006d\u000b\u0000\u03ed\u00d9\u0001\u0000\u0000\u0000\u03ee\u03ef"+ + "\u0003N\u001f\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006"+ + "e\u000e\u0000\u03f1\u03f2\u0006e\u000f\u0000\u03f2\u00db\u0001\u0000\u0000"+ + "\u0000\u03f3\u03f4\u0003x4\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000\u03f5"+ + "\u03f6\u0006f\u0016\u0000\u03f6\u00dd\u0001\u0000\u0000\u0000\u03f7\u03f8"+ + "\u0003t2\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa\u0006g"+ + "\u0012\u0000\u03fa\u00df\u0001\u0000\u0000\u0000\u03fb\u0400\u0003R!\u0000"+ + 
"\u03fc\u0400\u0003P \u0000\u03fd\u0400\u0003`(\u0000\u03fe\u0400\u0003"+ + "\u00aaM\u0000\u03ff\u03fb\u0001\u0000\u0000\u0000\u03ff\u03fc\u0001\u0000"+ + "\u0000\u0000\u03ff\u03fd\u0001\u0000\u0000\u0000\u03ff\u03fe\u0001\u0000"+ + "\u0000\u0000\u0400\u00e1\u0001\u0000\u0000\u0000\u0401\u0404\u0003R!\u0000"+ + "\u0402\u0404\u0003\u00aaM\u0000\u0403\u0401\u0001\u0000\u0000\u0000\u0403"+ + "\u0402\u0001\u0000\u0000\u0000\u0404\u0408\u0001\u0000\u0000\u0000\u0405"+ + "\u0407\u0003\u00e0h\u0000\u0406\u0405\u0001\u0000\u0000\u0000\u0407\u040a"+ + "\u0001\u0000\u0000\u0000\u0408\u0406\u0001\u0000\u0000\u0000\u0408\u0409"+ + "\u0001\u0000\u0000\u0000\u0409\u0415\u0001\u0000\u0000\u0000\u040a\u0408"+ + "\u0001\u0000\u0000\u0000\u040b\u040e\u0003`(\u0000\u040c\u040e\u0003Z"+ + "%\u0000\u040d\u040b\u0001\u0000\u0000\u0000\u040d\u040c\u0001\u0000\u0000"+ + "\u0000\u040e\u0410\u0001\u0000\u0000\u0000\u040f\u0411\u0003\u00e0h\u0000"+ + "\u0410\u040f\u0001\u0000\u0000\u0000\u0411\u0412\u0001\u0000\u0000\u0000"+ + "\u0412\u0410\u0001\u0000\u0000\u0000\u0412\u0413\u0001\u0000\u0000\u0000"+ + "\u0413\u0415\u0001\u0000\u0000\u0000\u0414\u0403\u0001\u0000\u0000\u0000"+ + "\u0414\u040d\u0001\u0000\u0000\u0000\u0415\u00e3\u0001\u0000\u0000\u0000"+ + "\u0416\u0419\u0003\u00e2i\u0000\u0417\u0419\u0003\u00b8T\u0000\u0418\u0416"+ + "\u0001\u0000\u0000\u0000\u0418\u0417\u0001\u0000\u0000\u0000\u0419\u041a"+ + "\u0001\u0000\u0000\u0000\u041a\u0418\u0001\u0000\u0000\u0000\u041a\u041b"+ + "\u0001\u0000\u0000\u0000\u041b\u00e5\u0001\u0000\u0000\u0000\u041c\u041d"+ + "\u0003:\u0015\u0000\u041d\u041e\u0001\u0000\u0000\u0000\u041e\u041f\u0006"+ + "k\u000b\u0000\u041f\u00e7\u0001\u0000\u0000\u0000\u0420\u0421\u0003<\u0016"+ + "\u0000\u0421\u0422\u0001\u0000\u0000\u0000\u0422\u0423\u0006l\u000b\u0000"+ + "\u0423\u00e9\u0001\u0000\u0000\u0000\u0424\u0425\u0003>\u0017\u0000\u0425"+ + "\u0426\u0001\u0000\u0000\u0000\u0426\u0427\u0006m\u000b\u0000\u0427\u00eb"+ + "\u0001\u0000\u0000\u0000\u0428\u0429\u0003N\u001f\u0000\u0429\u042a\u0001"+ + "\u0000\u0000\u0000\u042a\u042b\u0006n\u000e\u0000\u042b\u042c\u0006n\u000f"+ + "\u0000\u042c\u00ed\u0001\u0000\u0000\u0000\u042d\u042e\u0003p0\u0000\u042e"+ + "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006o\u0013\u0000\u0430\u00ef"+ + "\u0001\u0000\u0000\u0000\u0431\u0432\u0003t2\u0000\u0432\u0433\u0001\u0000"+ + "\u0000\u0000\u0433\u0434\u0006p\u0012\u0000\u0434\u00f1\u0001\u0000\u0000"+ + "\u0000\u0435\u0436\u0003x4\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437"+ + "\u0438\u0006q\u0016\u0000\u0438\u00f3\u0001\u0000\u0000\u0000\u0439\u043a"+ + "\u0005a\u0000\u0000\u043a\u043b\u0005s\u0000\u0000\u043b\u00f5\u0001\u0000"+ + "\u0000\u0000\u043c\u043d\u0003\u00e4j\u0000\u043d\u043e\u0001\u0000\u0000"+ + "\u0000\u043e\u043f\u0006s\u0017\u0000\u043f\u00f7\u0001\u0000\u0000\u0000"+ + "\u0440\u0441\u0003:\u0015\u0000\u0441\u0442\u0001\u0000\u0000\u0000\u0442"+ + "\u0443\u0006t\u000b\u0000\u0443\u00f9\u0001\u0000\u0000\u0000\u0444\u0445"+ + "\u0003<\u0016\u0000\u0445\u0446\u0001\u0000\u0000\u0000\u0446\u0447\u0006"+ + "u\u000b\u0000\u0447\u00fb\u0001\u0000\u0000\u0000\u0448\u0449\u0003>\u0017"+ + "\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006v\u000b\u0000"+ + "\u044b\u00fd\u0001\u0000\u0000\u0000\u044c\u044d\u0003N\u001f\u0000\u044d"+ + "\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006w\u000e\u0000\u044f\u0450"+ + "\u0006w\u000f\u0000\u0450\u00ff\u0001\u0000\u0000\u0000\u0451\u0452\u0003"+ + "\u00b2Q\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006x\f"+ + 
"\u0000\u0454\u0455\u0006x\u0018\u0000\u0455\u0101\u0001\u0000\u0000\u0000"+ + "\u0456\u0457\u0005o\u0000\u0000\u0457\u0458\u0005n\u0000\u0000\u0458\u0459"+ + "\u0001\u0000\u0000\u0000\u0459\u045a\u0006y\u0019\u0000\u045a\u0103\u0001"+ + "\u0000\u0000\u0000\u045b\u045c\u0005w\u0000\u0000\u045c\u045d\u0005i\u0000"+ + "\u0000\u045d\u045e\u0005t\u0000\u0000\u045e\u045f\u0005h\u0000\u0000\u045f"+ + "\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006z\u0019\u0000\u0461\u0105"+ + "\u0001\u0000\u0000\u0000\u0462\u0463\b\f\u0000\u0000\u0463\u0107\u0001"+ + "\u0000\u0000\u0000\u0464\u0466\u0003\u0106{\u0000\u0465\u0464\u0001\u0000"+ + "\u0000\u0000\u0466\u0467\u0001\u0000\u0000\u0000\u0467\u0465\u0001\u0000"+ + "\u0000\u0000\u0467\u0468\u0001\u0000\u0000\u0000\u0468\u0469\u0001\u0000"+ + "\u0000\u0000\u0469\u046a\u0003\u016c\u00ae\u0000\u046a\u046c\u0001\u0000"+ + "\u0000\u0000\u046b\u0465\u0001\u0000\u0000\u0000\u046b\u046c\u0001\u0000"+ + "\u0000\u0000\u046c\u046e\u0001\u0000\u0000\u0000\u046d\u046f\u0003\u0106"+ + "{\u0000\u046e\u046d\u0001\u0000\u0000\u0000\u046f\u0470\u0001\u0000\u0000"+ + "\u0000\u0470\u046e\u0001\u0000\u0000\u0000\u0470\u0471\u0001\u0000\u0000"+ + "\u0000\u0471\u0109\u0001\u0000\u0000\u0000\u0472\u0473\u0003\u0108|\u0000"+ + "\u0473\u0474\u0001\u0000\u0000\u0000\u0474\u0475\u0006}\u001a\u0000\u0475"+ + "\u010b\u0001\u0000\u0000\u0000\u0476\u0477\u0003:\u0015\u0000\u0477\u0478"+ + "\u0001\u0000\u0000\u0000\u0478\u0479\u0006~\u000b\u0000\u0479\u010d\u0001"+ + "\u0000\u0000\u0000\u047a\u047b\u0003<\u0016\u0000\u047b\u047c\u0001\u0000"+ + "\u0000\u0000\u047c\u047d\u0006\u007f\u000b\u0000\u047d\u010f\u0001\u0000"+ + "\u0000\u0000\u047e\u047f\u0003>\u0017\u0000\u047f\u0480\u0001\u0000\u0000"+ + "\u0000\u0480\u0481\u0006\u0080\u000b\u0000\u0481\u0111\u0001\u0000\u0000"+ + "\u0000\u0482\u0483\u0003N\u001f\u0000\u0483\u0484\u0001\u0000\u0000\u0000"+ + "\u0484\u0485\u0006\u0081\u000e\u0000\u0485\u0486\u0006\u0081\u000f\u0000"+ + "\u0486\u0487\u0006\u0081\u000f\u0000\u0487\u0113\u0001\u0000\u0000\u0000"+ + "\u0488\u0489\u0003p0\u0000\u0489\u048a\u0001\u0000\u0000\u0000\u048a\u048b"+ + "\u0006\u0082\u0013\u0000\u048b\u0115\u0001\u0000\u0000\u0000\u048c\u048d"+ + "\u0003t2\u0000\u048d\u048e\u0001\u0000\u0000\u0000\u048e\u048f\u0006\u0083"+ + "\u0012\u0000\u048f\u0117\u0001\u0000\u0000\u0000\u0490\u0491\u0003x4\u0000"+ + "\u0491\u0492\u0001\u0000\u0000\u0000\u0492\u0493\u0006\u0084\u0016\u0000"+ + "\u0493\u0119\u0001\u0000\u0000\u0000\u0494\u0495\u0003\u0104z\u0000\u0495"+ + "\u0496\u0001\u0000\u0000\u0000\u0496\u0497\u0006\u0085\u001b\u0000\u0497"+ + "\u011b\u0001\u0000\u0000\u0000\u0498\u0499\u0003\u00e4j\u0000\u0499\u049a"+ + "\u0001\u0000\u0000\u0000\u049a\u049b\u0006\u0086\u0017\u0000\u049b\u011d"+ + "\u0001\u0000\u0000\u0000\u049c\u049d\u0003\u00baU\u0000\u049d\u049e\u0001"+ + "\u0000\u0000\u0000\u049e\u049f\u0006\u0087\u001c\u0000\u049f\u011f\u0001"+ + "\u0000\u0000\u0000\u04a0\u04a1\u0003:\u0015\u0000\u04a1\u04a2\u0001\u0000"+ + "\u0000\u0000\u04a2\u04a3\u0006\u0088\u000b\u0000\u04a3\u0121\u0001\u0000"+ + "\u0000\u0000\u04a4\u04a5\u0003<\u0016\u0000\u04a5\u04a6\u0001\u0000\u0000"+ + "\u0000\u04a6\u04a7\u0006\u0089\u000b\u0000\u04a7\u0123\u0001\u0000\u0000"+ + "\u0000\u04a8\u04a9\u0003>\u0017\u0000\u04a9\u04aa\u0001\u0000\u0000\u0000"+ + "\u04aa\u04ab\u0006\u008a\u000b\u0000\u04ab\u0125\u0001\u0000\u0000\u0000"+ + "\u04ac\u04ad\u0003N\u001f\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000\u04ae"+ + "\u04af\u0006\u008b\u000e\u0000\u04af\u04b0\u0006\u008b\u000f\u0000\u04b0"+ + 
"\u0127\u0001\u0000\u0000\u0000\u04b1\u04b2\u0003\u016c\u00ae\u0000\u04b2"+ + "\u04b3\u0001\u0000\u0000\u0000\u04b3\u04b4\u0006\u008c\u0011\u0000\u04b4"+ + "\u0129\u0001\u0000\u0000\u0000\u04b5\u04b6\u0003t2\u0000\u04b6\u04b7\u0001"+ + "\u0000\u0000\u0000\u04b7\u04b8\u0006\u008d\u0012\u0000\u04b8\u012b\u0001"+ + "\u0000\u0000\u0000\u04b9\u04ba\u0003x4\u0000\u04ba\u04bb\u0001\u0000\u0000"+ + "\u0000\u04bb\u04bc\u0006\u008e\u0016\u0000\u04bc\u012d\u0001\u0000\u0000"+ + "\u0000\u04bd\u04be\u0003\u0102y\u0000\u04be\u04bf\u0001\u0000\u0000\u0000"+ + "\u04bf\u04c0\u0006\u008f\u001d\u0000\u04c0\u04c1\u0006\u008f\u001e\u0000"+ + "\u04c1\u012f\u0001\u0000\u0000\u0000\u04c2\u04c3\u0003B\u0019\u0000\u04c3"+ + "\u04c4\u0001\u0000\u0000\u0000\u04c4\u04c5\u0006\u0090\u0014\u0000\u04c5"+ + "\u0131\u0001\u0000\u0000\u0000\u04c6\u04c7\u0003d*\u0000\u04c7\u04c8\u0001"+ + "\u0000\u0000\u0000\u04c8\u04c9\u0006\u0091\u0015\u0000\u04c9\u0133\u0001"+ + "\u0000\u0000\u0000\u04ca\u04cb\u0003:\u0015\u0000\u04cb\u04cc\u0001\u0000"+ + "\u0000\u0000\u04cc\u04cd\u0006\u0092\u000b\u0000\u04cd\u0135\u0001\u0000"+ + "\u0000\u0000\u04ce\u04cf\u0003<\u0016\u0000\u04cf\u04d0\u0001\u0000\u0000"+ + "\u0000\u04d0\u04d1\u0006\u0093\u000b\u0000\u04d1\u0137\u0001\u0000\u0000"+ + "\u0000\u04d2\u04d3\u0003>\u0017\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000"+ + "\u04d4\u04d5\u0006\u0094\u000b\u0000\u04d5\u0139\u0001\u0000\u0000\u0000"+ + "\u04d6\u04d7\u0003N\u001f\u0000\u04d7\u04d8\u0001\u0000\u0000\u0000\u04d8"+ + "\u04d9\u0006\u0095\u000e\u0000\u04d9\u04da\u0006\u0095\u000f\u0000\u04da"+ + "\u04db\u0006\u0095\u000f\u0000\u04db\u013b\u0001\u0000\u0000\u0000\u04dc"+ + "\u04dd\u0003t2\u0000\u04dd\u04de\u0001\u0000\u0000\u0000\u04de\u04df\u0006"+ + "\u0096\u0012\u0000\u04df\u013d\u0001\u0000\u0000\u0000\u04e0\u04e1\u0003"+ + "x4\u0000\u04e1\u04e2\u0001\u0000\u0000\u0000\u04e2\u04e3\u0006\u0097\u0016"+ + "\u0000\u04e3\u013f\u0001\u0000\u0000\u0000\u04e4\u04e5\u0003\u00e4j\u0000"+ + "\u04e5\u04e6\u0001\u0000\u0000\u0000\u04e6\u04e7\u0006\u0098\u0017\u0000"+ + "\u04e7\u0141\u0001\u0000\u0000\u0000\u04e8\u04e9\u0003:\u0015\u0000\u04e9"+ + "\u04ea\u0001\u0000\u0000\u0000\u04ea\u04eb\u0006\u0099\u000b\u0000\u04eb"+ + "\u0143\u0001\u0000\u0000\u0000\u04ec\u04ed\u0003<\u0016\u0000\u04ed\u04ee"+ + "\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u009a\u000b\u0000\u04ef\u0145"+ + "\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003>\u0017\u0000\u04f1\u04f2\u0001"+ + "\u0000\u0000\u0000\u04f2\u04f3\u0006\u009b\u000b\u0000\u04f3\u0147\u0001"+ + "\u0000\u0000\u0000\u04f4\u04f5\u0003N\u001f\u0000\u04f5\u04f6\u0001\u0000"+ + "\u0000\u0000\u04f6\u04f7\u0006\u009c\u000e\u0000\u04f7\u04f8\u0006\u009c"+ + "\u000f\u0000\u04f8\u0149\u0001\u0000\u0000\u0000\u04f9\u04fa\u0003x4\u0000"+ + "\u04fa\u04fb\u0001\u0000\u0000\u0000\u04fb\u04fc\u0006\u009d\u0016\u0000"+ + "\u04fc\u014b\u0001\u0000\u0000\u0000\u04fd\u04fe\u0003\u00baU\u0000\u04fe"+ + "\u04ff\u0001\u0000\u0000\u0000\u04ff\u0500\u0006\u009e\u001c\u0000\u0500"+ + "\u014d\u0001\u0000\u0000\u0000\u0501\u0502\u0003\u00b6S\u0000\u0502\u0503"+ + "\u0001\u0000\u0000\u0000\u0503\u0504\u0006\u009f\u001f\u0000\u0504\u014f"+ + "\u0001\u0000\u0000\u0000\u0505\u0506\u0003:\u0015\u0000\u0506\u0507\u0001"+ + "\u0000\u0000\u0000\u0507\u0508\u0006\u00a0\u000b\u0000\u0508\u0151\u0001"+ + "\u0000\u0000\u0000\u0509\u050a\u0003<\u0016\u0000\u050a\u050b\u0001\u0000"+ + "\u0000\u0000\u050b\u050c\u0006\u00a1\u000b\u0000\u050c\u0153\u0001\u0000"+ + "\u0000\u0000\u050d\u050e\u0003>\u0017\u0000\u050e\u050f\u0001\u0000\u0000"+ + 
"\u0000\u050f\u0510\u0006\u00a2\u000b\u0000\u0510\u0155\u0001\u0000\u0000"+ + "\u0000\u0511\u0512\u0003N\u001f\u0000\u0512\u0513\u0001\u0000\u0000\u0000"+ + "\u0513\u0514\u0006\u00a3\u000e\u0000\u0514\u0515\u0006\u00a3\u000f\u0000"+ + "\u0515\u0157\u0001\u0000\u0000\u0000\u0516\u0517\u0005i\u0000\u0000\u0517"+ + "\u0518\u0005n\u0000\u0000\u0518\u0519\u0005f\u0000\u0000\u0519\u051a\u0005"+ + "o\u0000\u0000\u051a\u0159\u0001\u0000\u0000\u0000\u051b\u051c\u0003:\u0015"+ + "\u0000\u051c\u051d\u0001\u0000\u0000\u0000\u051d\u051e\u0006\u00a5\u000b"+ + "\u0000\u051e\u015b\u0001\u0000\u0000\u0000\u051f\u0520\u0003<\u0016\u0000"+ + "\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006\u00a6\u000b\u0000"+ + "\u0522\u015d\u0001\u0000\u0000\u0000\u0523\u0524\u0003>\u0017\u0000\u0524"+ + "\u0525\u0001\u0000\u0000\u0000\u0525\u0526\u0006\u00a7\u000b\u0000\u0526"+ + "\u015f\u0001\u0000\u0000\u0000\u0527\u0528\u0003N\u001f\u0000\u0528\u0529"+ + "\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u00a8\u000e\u0000\u052a\u052b"+ + "\u0006\u00a8\u000f\u0000\u052b\u0161\u0001\u0000\u0000\u0000\u052c\u052d"+ + "\u0005f\u0000\u0000\u052d\u052e\u0005u\u0000\u0000\u052e\u052f\u0005n"+ + "\u0000\u0000\u052f\u0530\u0005c\u0000\u0000\u0530\u0531\u0005t\u0000\u0000"+ + "\u0531\u0532\u0005i\u0000\u0000\u0532\u0533\u0005o\u0000\u0000\u0533\u0534"+ + "\u0005n\u0000\u0000\u0534\u0535\u0005s\u0000\u0000\u0535\u0163\u0001\u0000"+ + "\u0000\u0000\u0536\u0537\u0003:\u0015\u0000\u0537\u0538\u0001\u0000\u0000"+ + "\u0000\u0538\u0539\u0006\u00aa\u000b\u0000\u0539\u0165\u0001\u0000\u0000"+ + "\u0000\u053a\u053b\u0003<\u0016\u0000\u053b\u053c\u0001\u0000\u0000\u0000"+ + "\u053c\u053d\u0006\u00ab\u000b\u0000\u053d\u0167\u0001\u0000\u0000\u0000"+ + "\u053e\u053f\u0003>\u0017\u0000\u053f\u0540\u0001\u0000\u0000\u0000\u0540"+ + "\u0541\u0006\u00ac\u000b\u0000\u0541\u0169\u0001\u0000\u0000\u0000\u0542"+ + "\u0543\u0003\u00b4R\u0000\u0543\u0544\u0001\u0000\u0000\u0000\u0544\u0545"+ + "\u0006\u00ad\u0010\u0000\u0545\u0546\u0006\u00ad\u000f\u0000\u0546\u016b"+ + "\u0001\u0000\u0000\u0000\u0547\u0548\u0005:\u0000\u0000\u0548\u016d\u0001"+ + "\u0000\u0000\u0000\u0549\u054f\u0003Z%\u0000\u054a\u054f\u0003P \u0000"+ + "\u054b\u054f\u0003x4\u0000\u054c\u054f\u0003R!\u0000\u054d\u054f\u0003"+ + "`(\u0000\u054e\u0549\u0001\u0000\u0000\u0000\u054e\u054a\u0001\u0000\u0000"+ + "\u0000\u054e\u054b\u0001\u0000\u0000\u0000\u054e\u054c\u0001\u0000\u0000"+ + "\u0000\u054e\u054d\u0001\u0000\u0000\u0000\u054f\u0550\u0001\u0000\u0000"+ + "\u0000\u0550\u054e\u0001\u0000\u0000\u0000\u0550\u0551\u0001\u0000\u0000"+ + "\u0000\u0551\u016f\u0001\u0000\u0000\u0000\u0552\u0553\u0003:\u0015\u0000"+ + "\u0553\u0554\u0001\u0000\u0000\u0000\u0554\u0555\u0006\u00b0\u000b\u0000"+ + "\u0555\u0171\u0001\u0000\u0000\u0000\u0556\u0557\u0003<\u0016\u0000\u0557"+ + "\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00b1\u000b\u0000\u0559"+ + "\u0173\u0001\u0000\u0000\u0000\u055a\u055b\u0003>\u0017\u0000\u055b\u055c"+ + "\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00b2\u000b\u0000\u055d\u0175"+ + "\u0001\u0000\u0000\u0000\u055e\u055f\u0003N\u001f\u0000\u055f\u0560\u0001"+ + "\u0000\u0000\u0000\u0560\u0561\u0006\u00b3\u000e\u0000\u0561\u0562\u0006"+ + "\u00b3\u000f\u0000\u0562\u0177\u0001\u0000\u0000\u0000\u0563\u0564\u0003"+ + "B\u0019\u0000\u0564\u0565\u0001\u0000\u0000\u0000\u0565\u0566\u0006\u00b4"+ + "\u0014\u0000\u0566\u0567\u0006\u00b4\u000f\u0000\u0567\u0568\u0006\u00b4"+ + " \u0000\u0568\u0179\u0001\u0000\u0000\u0000\u0569\u056a\u0003d*\u0000"+ + 
"\u056a\u056b\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00b5\u0015\u0000"+ + "\u056c\u056d\u0006\u00b5\u000f\u0000\u056d\u056e\u0006\u00b5 \u0000\u056e"+ + "\u017b\u0001\u0000\u0000\u0000\u056f\u0570\u0003:\u0015\u0000\u0570\u0571"+ + "\u0001\u0000\u0000\u0000\u0571\u0572\u0006\u00b6\u000b\u0000\u0572\u017d"+ + "\u0001\u0000\u0000\u0000\u0573\u0574\u0003<\u0016\u0000\u0574\u0575\u0001"+ + "\u0000\u0000\u0000\u0575\u0576\u0006\u00b7\u000b\u0000\u0576\u017f\u0001"+ + "\u0000\u0000\u0000\u0577\u0578\u0003>\u0017\u0000\u0578\u0579\u0001\u0000"+ + "\u0000\u0000\u0579\u057a\u0006\u00b8\u000b\u0000\u057a\u0181\u0001\u0000"+ + "\u0000\u0000\u057b\u057c\u0003\u016c\u00ae\u0000\u057c\u057d\u0001\u0000"+ + "\u0000\u0000\u057d\u057e\u0006\u00b9\u0011\u0000\u057e\u057f\u0006\u00b9"+ + "\u000f\u0000\u057f\u0580\u0006\u00b9\u0007\u0000\u0580\u0183\u0001\u0000"+ + "\u0000\u0000\u0581\u0582\u0003t2\u0000\u0582\u0583\u0001\u0000\u0000\u0000"+ + "\u0583\u0584\u0006\u00ba\u0012\u0000\u0584\u0585\u0006\u00ba\u000f\u0000"+ + "\u0585\u0586\u0006\u00ba\u0007\u0000\u0586\u0185\u0001\u0000\u0000\u0000"+ + "\u0587\u0588\u0003:\u0015\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589"+ + "\u058a\u0006\u00bb\u000b\u0000\u058a\u0187\u0001\u0000\u0000\u0000\u058b"+ + "\u058c\u0003<\u0016\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e"+ + "\u0006\u00bc\u000b\u0000\u058e\u0189\u0001\u0000\u0000\u0000\u058f\u0590"+ + "\u0003>\u0017\u0000\u0590\u0591\u0001\u0000\u0000\u0000\u0591\u0592\u0006"+ + "\u00bd\u000b\u0000\u0592\u018b\u0001\u0000\u0000\u0000\u0593\u0594\u0003"+ + "\u00baU\u0000\u0594\u0595\u0001\u0000\u0000\u0000\u0595\u0596\u0006\u00be"+ + "\u000f\u0000\u0596\u0597\u0006\u00be\u0000\u0000\u0597\u0598\u0006\u00be"+ + "\u001c\u0000\u0598\u018d\u0001\u0000\u0000\u0000\u0599\u059a\u0003\u00b6"+ + "S\u0000\u059a\u059b\u0001\u0000\u0000\u0000\u059b\u059c\u0006\u00bf\u000f"+ + "\u0000\u059c\u059d\u0006\u00bf\u0000\u0000\u059d\u059e\u0006\u00bf\u001f"+ + "\u0000\u059e\u018f\u0001\u0000\u0000\u0000\u059f\u05a0\u0003j-\u0000\u05a0"+ + "\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2\u0006\u00c0\u000f\u0000\u05a2"+ + "\u05a3\u0006\u00c0\u0000\u0000\u05a3\u05a4\u0006\u00c0!\u0000\u05a4\u0191"+ + "\u0001\u0000\u0000\u0000\u05a5\u05a6\u0003N\u001f\u0000\u05a6\u05a7\u0001"+ + "\u0000\u0000\u0000\u05a7\u05a8\u0006\u00c1\u000e\u0000\u05a8\u05a9\u0006"+ + "\u00c1\u000f\u0000\u05a9\u0193\u0001\u0000\u0000\u0000A\u0000\u0001\u0002"+ + "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0240\u024a"+ + "\u024e\u0251\u025a\u025c\u0267\u026e\u0273\u029a\u029f\u02a8\u02af\u02b4"+ + "\u02b6\u02c1\u02c9\u02cc\u02ce\u02d3\u02d8\u02de\u02e5\u02ea\u02f0\u02f3"+ + "\u02fb\u02ff\u037b\u0382\u0384\u0394\u0399\u039e\u03a0\u03a6\u03ff\u0403"+ + "\u0408\u040d\u0412\u0414\u0418\u041a\u0467\u046b\u0470\u054e\u0550\"\u0005"+ + "\u0002\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005"+ + "\u0003\u0000\u0005\b\u0000\u0005\f\u0000\u0005\u000e\u0000\u0005\n\u0000"+ + "\u0005\u0005\u0000\u0005\u000b\u0000\u0000\u0001\u0000\u0007E\u0000\u0005"+ + "\u0000\u0000\u0007\u001d\u0000\u0004\u0000\u0000\u0007F\u0000\u0007r\u0000"+ + "\u0007&\u0000\u0007$\u0000\u0007\u0019\u0000\u0007\u001e\u0000\u0007("+ + "\u0000\u0007P\u0000\u0005\r\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007"+ + "Y\u0000\u0007H\u0000\u0007X\u0000\u0005\t\u0000\u0007G\u0000\u0005\u000f"+ + "\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 5900020590110..6c5edef9e98f0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -151,7 +151,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -INDEX_UNQUOTED_IDENTIFIER +UNQUOTED_SOURCE EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -269,7 +269,9 @@ rowCommand fields field fromCommand -indexIdentifier +indexPattern +clusterString +indexString metadata metadataOption deprecated_metadata @@ -312,4 +314,4 @@ lookupCommand atn: -[4, 1, 124, 554, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 122, 8, 1, 10, 1, 12, 1, 125, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 133, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 149, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 168, 8, 5, 10, 5, 12, 5, 171, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 178, 8, 5, 1, 5, 1, 5, 3, 5, 182, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 190, 8, 5, 10, 5, 12, 5, 193, 9, 5, 1, 6, 1, 6, 3, 6, 197, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 204, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 209, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 216, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 222, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 230, 8, 8, 10, 8, 12, 8, 233, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 243, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 248, 8, 9, 10, 9, 12, 9, 251, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 259, 8, 10, 10, 10, 12, 10, 262, 9, 10, 3, 10, 264, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 276, 8, 13, 10, 13, 12, 13, 279, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 286, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 292, 8, 15, 10, 15, 12, 15, 295, 9, 15, 1, 15, 3, 15, 298, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 304, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 310, 8, 18, 10, 18, 12, 18, 313, 9, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 323, 8, 20, 10, 20, 12, 20, 326, 9, 20, 1, 20, 3, 20, 329, 8, 20, 1, 20, 1, 20, 3, 20, 333, 8, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 3, 22, 340, 8, 22, 1, 22, 1, 22, 3, 22, 344, 8, 22, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 350, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 355, 8, 24, 10, 24, 12, 24, 358, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 363, 8, 25, 10, 25, 12, 25, 366, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 371, 8, 26, 10, 26, 12, 26, 374, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 
1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 393, 8, 29, 10, 29, 12, 29, 396, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 404, 8, 29, 10, 29, 12, 29, 407, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 415, 8, 29, 10, 29, 12, 29, 418, 9, 29, 1, 29, 1, 29, 3, 29, 422, 8, 29, 1, 30, 1, 30, 3, 30, 426, 8, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 435, 8, 32, 10, 32, 12, 32, 438, 9, 32, 1, 33, 1, 33, 3, 33, 442, 8, 33, 1, 33, 1, 33, 3, 33, 446, 8, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 458, 8, 36, 10, 36, 12, 36, 461, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 471, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 5, 41, 483, 8, 41, 10, 41, 12, 41, 486, 9, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 3, 44, 496, 8, 44, 1, 45, 3, 45, 499, 8, 45, 1, 45, 1, 45, 1, 46, 3, 46, 504, 8, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 529, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 5, 53, 535, 8, 53, 10, 53, 12, 53, 538, 9, 53, 3, 53, 540, 8, 53, 1, 54, 1, 54, 1, 54, 3, 54, 545, 8, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 0, 4, 2, 10, 16, 18, 56, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 0, 7, 1, 0, 63, 64, 1, 0, 65, 67, 1, 0, 71, 72, 2, 0, 35, 35, 39, 39, 1, 0, 42, 43, 2, 0, 41, 41, 55, 55, 2, 0, 56, 56, 58, 62, 580, 0, 112, 1, 0, 0, 0, 2, 115, 1, 0, 0, 0, 4, 132, 1, 0, 0, 0, 6, 148, 1, 0, 0, 0, 8, 150, 1, 0, 0, 0, 10, 181, 1, 0, 0, 0, 12, 208, 1, 0, 0, 0, 14, 215, 1, 0, 0, 0, 16, 221, 1, 0, 0, 0, 18, 242, 1, 0, 0, 0, 20, 252, 1, 0, 0, 0, 22, 267, 1, 0, 0, 0, 24, 269, 1, 0, 0, 0, 26, 272, 1, 0, 0, 0, 28, 285, 1, 0, 0, 0, 30, 287, 1, 0, 0, 0, 32, 299, 1, 0, 0, 0, 34, 303, 1, 0, 0, 0, 36, 305, 1, 0, 0, 0, 38, 314, 1, 0, 0, 0, 40, 318, 1, 0, 0, 0, 42, 334, 1, 0, 0, 0, 44, 337, 1, 0, 0, 0, 46, 345, 1, 0, 0, 0, 48, 351, 1, 0, 0, 0, 50, 359, 1, 0, 0, 0, 52, 367, 1, 0, 0, 0, 54, 375, 1, 0, 0, 0, 56, 377, 1, 0, 0, 0, 58, 421, 1, 0, 0, 0, 60, 425, 1, 0, 0, 0, 62, 427, 1, 0, 0, 0, 64, 430, 1, 0, 0, 0, 66, 439, 1, 0, 0, 0, 68, 447, 1, 0, 0, 0, 70, 450, 1, 0, 0, 0, 72, 453, 1, 0, 0, 0, 74, 462, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 472, 1, 0, 0, 0, 80, 476, 1, 0, 0, 0, 82, 479, 1, 0, 0, 0, 84, 487, 1, 0, 0, 0, 86, 491, 1, 0, 0, 0, 88, 495, 1, 0, 0, 0, 90, 498, 1, 0, 0, 0, 92, 503, 1, 0, 0, 0, 94, 507, 1, 0, 0, 0, 96, 509, 1, 0, 0, 0, 98, 511, 1, 0, 0, 0, 100, 514, 1, 0, 0, 0, 102, 518, 1, 0, 0, 0, 104, 521, 1, 0, 0, 0, 106, 524, 1, 0, 0, 0, 108, 544, 1, 0, 0, 0, 110, 548, 1, 0, 0, 0, 112, 113, 3, 2, 1, 0, 113, 114, 5, 0, 0, 1, 114, 1, 1, 0, 0, 0, 115, 116, 6, 1, -1, 0, 116, 117, 3, 4, 2, 0, 117, 123, 1, 0, 0, 0, 118, 119, 10, 1, 0, 0, 119, 120, 5, 29, 0, 0, 120, 122, 3, 6, 3, 0, 121, 118, 1, 0, 0, 0, 122, 125, 1, 0, 0, 0, 123, 121, 1, 0, 0, 0, 123, 124, 1, 0, 0, 0, 124, 3, 1, 0, 0, 0, 125, 123, 1, 0, 0, 0, 126, 133, 3, 98, 49, 0, 127, 133, 3, 30, 15, 0, 128, 133, 3, 24, 12, 0, 129, 133, 3, 40, 20, 0, 130, 133, 3, 102, 51, 0, 131, 133, 3, 104, 52, 0, 132, 126, 1, 0, 0, 0, 132, 127, 1, 0, 0, 0, 132, 128, 1, 0, 0, 0, 132, 129, 1, 0, 0, 0, 132, 130, 1, 0, 0, 0, 132, 131, 1, 0, 0, 0, 133, 5, 1, 0, 0, 0, 
134, 149, 3, 42, 21, 0, 135, 149, 3, 46, 23, 0, 136, 149, 3, 62, 31, 0, 137, 149, 3, 110, 55, 0, 138, 149, 3, 68, 34, 0, 139, 149, 3, 64, 32, 0, 140, 149, 3, 44, 22, 0, 141, 149, 3, 8, 4, 0, 142, 149, 3, 70, 35, 0, 143, 149, 3, 72, 36, 0, 144, 149, 3, 76, 38, 0, 145, 149, 3, 78, 39, 0, 146, 149, 3, 106, 53, 0, 147, 149, 3, 80, 40, 0, 148, 134, 1, 0, 0, 0, 148, 135, 1, 0, 0, 0, 148, 136, 1, 0, 0, 0, 148, 137, 1, 0, 0, 0, 148, 138, 1, 0, 0, 0, 148, 139, 1, 0, 0, 0, 148, 140, 1, 0, 0, 0, 148, 141, 1, 0, 0, 0, 148, 142, 1, 0, 0, 0, 148, 143, 1, 0, 0, 0, 148, 144, 1, 0, 0, 0, 148, 145, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 148, 147, 1, 0, 0, 0, 149, 7, 1, 0, 0, 0, 150, 151, 5, 20, 0, 0, 151, 152, 3, 10, 5, 0, 152, 9, 1, 0, 0, 0, 153, 154, 6, 5, -1, 0, 154, 155, 5, 48, 0, 0, 155, 182, 3, 10, 5, 7, 156, 182, 3, 14, 7, 0, 157, 182, 3, 12, 6, 0, 158, 160, 3, 14, 7, 0, 159, 161, 5, 48, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 5, 45, 0, 0, 163, 164, 5, 44, 0, 0, 164, 169, 3, 14, 7, 0, 165, 166, 5, 38, 0, 0, 166, 168, 3, 14, 7, 0, 167, 165, 1, 0, 0, 0, 168, 171, 1, 0, 0, 0, 169, 167, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 172, 1, 0, 0, 0, 171, 169, 1, 0, 0, 0, 172, 173, 5, 54, 0, 0, 173, 182, 1, 0, 0, 0, 174, 175, 3, 14, 7, 0, 175, 177, 5, 46, 0, 0, 176, 178, 5, 48, 0, 0, 177, 176, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 179, 1, 0, 0, 0, 179, 180, 5, 49, 0, 0, 180, 182, 1, 0, 0, 0, 181, 153, 1, 0, 0, 0, 181, 156, 1, 0, 0, 0, 181, 157, 1, 0, 0, 0, 181, 158, 1, 0, 0, 0, 181, 174, 1, 0, 0, 0, 182, 191, 1, 0, 0, 0, 183, 184, 10, 4, 0, 0, 184, 185, 5, 34, 0, 0, 185, 190, 3, 10, 5, 5, 186, 187, 10, 3, 0, 0, 187, 188, 5, 51, 0, 0, 188, 190, 3, 10, 5, 4, 189, 183, 1, 0, 0, 0, 189, 186, 1, 0, 0, 0, 190, 193, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 11, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 194, 196, 3, 14, 7, 0, 195, 197, 5, 48, 0, 0, 196, 195, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 5, 47, 0, 0, 199, 200, 3, 94, 47, 0, 200, 209, 1, 0, 0, 0, 201, 203, 3, 14, 7, 0, 202, 204, 5, 48, 0, 0, 203, 202, 1, 0, 0, 0, 203, 204, 1, 0, 0, 0, 204, 205, 1, 0, 0, 0, 205, 206, 5, 53, 0, 0, 206, 207, 3, 94, 47, 0, 207, 209, 1, 0, 0, 0, 208, 194, 1, 0, 0, 0, 208, 201, 1, 0, 0, 0, 209, 13, 1, 0, 0, 0, 210, 216, 3, 16, 8, 0, 211, 212, 3, 16, 8, 0, 212, 213, 3, 96, 48, 0, 213, 214, 3, 16, 8, 0, 214, 216, 1, 0, 0, 0, 215, 210, 1, 0, 0, 0, 215, 211, 1, 0, 0, 0, 216, 15, 1, 0, 0, 0, 217, 218, 6, 8, -1, 0, 218, 222, 3, 18, 9, 0, 219, 220, 7, 0, 0, 0, 220, 222, 3, 16, 8, 3, 221, 217, 1, 0, 0, 0, 221, 219, 1, 0, 0, 0, 222, 231, 1, 0, 0, 0, 223, 224, 10, 2, 0, 0, 224, 225, 7, 1, 0, 0, 225, 230, 3, 16, 8, 3, 226, 227, 10, 1, 0, 0, 227, 228, 7, 0, 0, 0, 228, 230, 3, 16, 8, 2, 229, 223, 1, 0, 0, 0, 229, 226, 1, 0, 0, 0, 230, 233, 1, 0, 0, 0, 231, 229, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 17, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 234, 235, 6, 9, -1, 0, 235, 243, 3, 58, 29, 0, 236, 243, 3, 48, 24, 0, 237, 243, 3, 20, 10, 0, 238, 239, 5, 44, 0, 0, 239, 240, 3, 10, 5, 0, 240, 241, 5, 54, 0, 0, 241, 243, 1, 0, 0, 0, 242, 234, 1, 0, 0, 0, 242, 236, 1, 0, 0, 0, 242, 237, 1, 0, 0, 0, 242, 238, 1, 0, 0, 0, 243, 249, 1, 0, 0, 0, 244, 245, 10, 1, 0, 0, 245, 246, 5, 37, 0, 0, 246, 248, 3, 22, 11, 0, 247, 244, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 19, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 253, 3, 54, 27, 0, 253, 263, 5, 44, 0, 0, 254, 264, 5, 65, 0, 0, 255, 260, 3, 10, 5, 0, 256, 257, 5, 38, 0, 0, 257, 259, 3, 10, 5, 0, 258, 256, 1, 
0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 264, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 254, 1, 0, 0, 0, 263, 255, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 266, 5, 54, 0, 0, 266, 21, 1, 0, 0, 0, 267, 268, 3, 54, 27, 0, 268, 23, 1, 0, 0, 0, 269, 270, 5, 16, 0, 0, 270, 271, 3, 26, 13, 0, 271, 25, 1, 0, 0, 0, 272, 277, 3, 28, 14, 0, 273, 274, 5, 38, 0, 0, 274, 276, 3, 28, 14, 0, 275, 273, 1, 0, 0, 0, 276, 279, 1, 0, 0, 0, 277, 275, 1, 0, 0, 0, 277, 278, 1, 0, 0, 0, 278, 27, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 280, 286, 3, 10, 5, 0, 281, 282, 3, 48, 24, 0, 282, 283, 5, 36, 0, 0, 283, 284, 3, 10, 5, 0, 284, 286, 1, 0, 0, 0, 285, 280, 1, 0, 0, 0, 285, 281, 1, 0, 0, 0, 286, 29, 1, 0, 0, 0, 287, 288, 5, 6, 0, 0, 288, 293, 3, 32, 16, 0, 289, 290, 5, 38, 0, 0, 290, 292, 3, 32, 16, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 297, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 298, 3, 34, 17, 0, 297, 296, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 31, 1, 0, 0, 0, 299, 300, 5, 25, 0, 0, 300, 33, 1, 0, 0, 0, 301, 304, 3, 36, 18, 0, 302, 304, 3, 38, 19, 0, 303, 301, 1, 0, 0, 0, 303, 302, 1, 0, 0, 0, 304, 35, 1, 0, 0, 0, 305, 306, 5, 76, 0, 0, 306, 311, 3, 32, 16, 0, 307, 308, 5, 38, 0, 0, 308, 310, 3, 32, 16, 0, 309, 307, 1, 0, 0, 0, 310, 313, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 37, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 314, 315, 5, 69, 0, 0, 315, 316, 3, 36, 18, 0, 316, 317, 5, 70, 0, 0, 317, 39, 1, 0, 0, 0, 318, 319, 5, 13, 0, 0, 319, 324, 3, 32, 16, 0, 320, 321, 5, 38, 0, 0, 321, 323, 3, 32, 16, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 328, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 329, 3, 26, 13, 0, 328, 327, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 332, 1, 0, 0, 0, 330, 331, 5, 33, 0, 0, 331, 333, 3, 26, 13, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 41, 1, 0, 0, 0, 334, 335, 5, 4, 0, 0, 335, 336, 3, 26, 13, 0, 336, 43, 1, 0, 0, 0, 337, 339, 5, 19, 0, 0, 338, 340, 3, 26, 13, 0, 339, 338, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 343, 1, 0, 0, 0, 341, 342, 5, 33, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 45, 1, 0, 0, 0, 345, 346, 5, 8, 0, 0, 346, 349, 3, 26, 13, 0, 347, 348, 5, 33, 0, 0, 348, 350, 3, 26, 13, 0, 349, 347, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, 47, 1, 0, 0, 0, 351, 356, 3, 54, 27, 0, 352, 353, 5, 40, 0, 0, 353, 355, 3, 54, 27, 0, 354, 352, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 49, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 359, 364, 3, 56, 28, 0, 360, 361, 5, 40, 0, 0, 361, 363, 3, 56, 28, 0, 362, 360, 1, 0, 0, 0, 363, 366, 1, 0, 0, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 51, 1, 0, 0, 0, 366, 364, 1, 0, 0, 0, 367, 372, 3, 50, 25, 0, 368, 369, 5, 38, 0, 0, 369, 371, 3, 50, 25, 0, 370, 368, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 53, 1, 0, 0, 0, 374, 372, 1, 0, 0, 0, 375, 376, 7, 2, 0, 0, 376, 55, 1, 0, 0, 0, 377, 378, 5, 80, 0, 0, 378, 57, 1, 0, 0, 0, 379, 422, 5, 49, 0, 0, 380, 381, 3, 92, 46, 0, 381, 382, 5, 71, 0, 0, 382, 422, 1, 0, 0, 0, 383, 422, 3, 90, 45, 0, 384, 422, 3, 92, 46, 0, 385, 422, 3, 86, 43, 0, 386, 422, 3, 60, 30, 0, 387, 422, 3, 94, 47, 0, 388, 389, 5, 69, 0, 0, 389, 394, 3, 88, 44, 0, 390, 391, 5, 38, 0, 0, 391, 393, 3, 88, 44, 0, 392, 390, 1, 0, 0, 0, 393, 396, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 397, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 397, 398, 5, 
70, 0, 0, 398, 422, 1, 0, 0, 0, 399, 400, 5, 69, 0, 0, 400, 405, 3, 86, 43, 0, 401, 402, 5, 38, 0, 0, 402, 404, 3, 86, 43, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 408, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 408, 409, 5, 70, 0, 0, 409, 422, 1, 0, 0, 0, 410, 411, 5, 69, 0, 0, 411, 416, 3, 94, 47, 0, 412, 413, 5, 38, 0, 0, 413, 415, 3, 94, 47, 0, 414, 412, 1, 0, 0, 0, 415, 418, 1, 0, 0, 0, 416, 414, 1, 0, 0, 0, 416, 417, 1, 0, 0, 0, 417, 419, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 419, 420, 5, 70, 0, 0, 420, 422, 1, 0, 0, 0, 421, 379, 1, 0, 0, 0, 421, 380, 1, 0, 0, 0, 421, 383, 1, 0, 0, 0, 421, 384, 1, 0, 0, 0, 421, 385, 1, 0, 0, 0, 421, 386, 1, 0, 0, 0, 421, 387, 1, 0, 0, 0, 421, 388, 1, 0, 0, 0, 421, 399, 1, 0, 0, 0, 421, 410, 1, 0, 0, 0, 422, 59, 1, 0, 0, 0, 423, 426, 5, 52, 0, 0, 424, 426, 5, 68, 0, 0, 425, 423, 1, 0, 0, 0, 425, 424, 1, 0, 0, 0, 426, 61, 1, 0, 0, 0, 427, 428, 5, 10, 0, 0, 428, 429, 5, 31, 0, 0, 429, 63, 1, 0, 0, 0, 430, 431, 5, 18, 0, 0, 431, 436, 3, 66, 33, 0, 432, 433, 5, 38, 0, 0, 433, 435, 3, 66, 33, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 65, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 441, 3, 10, 5, 0, 440, 442, 7, 3, 0, 0, 441, 440, 1, 0, 0, 0, 441, 442, 1, 0, 0, 0, 442, 445, 1, 0, 0, 0, 443, 444, 5, 50, 0, 0, 444, 446, 7, 4, 0, 0, 445, 443, 1, 0, 0, 0, 445, 446, 1, 0, 0, 0, 446, 67, 1, 0, 0, 0, 447, 448, 5, 9, 0, 0, 448, 449, 3, 52, 26, 0, 449, 69, 1, 0, 0, 0, 450, 451, 5, 2, 0, 0, 451, 452, 3, 52, 26, 0, 452, 71, 1, 0, 0, 0, 453, 454, 5, 15, 0, 0, 454, 459, 3, 74, 37, 0, 455, 456, 5, 38, 0, 0, 456, 458, 3, 74, 37, 0, 457, 455, 1, 0, 0, 0, 458, 461, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 73, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 462, 463, 3, 50, 25, 0, 463, 464, 5, 84, 0, 0, 464, 465, 3, 50, 25, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 1, 0, 0, 467, 468, 3, 18, 9, 0, 468, 470, 3, 94, 47, 0, 469, 471, 3, 82, 41, 0, 470, 469, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 77, 1, 0, 0, 0, 472, 473, 5, 7, 0, 0, 473, 474, 3, 18, 9, 0, 474, 475, 3, 94, 47, 0, 475, 79, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 478, 3, 48, 24, 0, 478, 81, 1, 0, 0, 0, 479, 484, 3, 84, 42, 0, 480, 481, 5, 38, 0, 0, 481, 483, 3, 84, 42, 0, 482, 480, 1, 0, 0, 0, 483, 486, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 83, 1, 0, 0, 0, 486, 484, 1, 0, 0, 0, 487, 488, 3, 54, 27, 0, 488, 489, 5, 36, 0, 0, 489, 490, 3, 58, 29, 0, 490, 85, 1, 0, 0, 0, 491, 492, 7, 5, 0, 0, 492, 87, 1, 0, 0, 0, 493, 496, 3, 90, 45, 0, 494, 496, 3, 92, 46, 0, 495, 493, 1, 0, 0, 0, 495, 494, 1, 0, 0, 0, 496, 89, 1, 0, 0, 0, 497, 499, 7, 0, 0, 0, 498, 497, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 5, 32, 0, 0, 501, 91, 1, 0, 0, 0, 502, 504, 7, 0, 0, 0, 503, 502, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 1, 0, 0, 0, 505, 506, 5, 31, 0, 0, 506, 93, 1, 0, 0, 0, 507, 508, 5, 30, 0, 0, 508, 95, 1, 0, 0, 0, 509, 510, 7, 6, 0, 0, 510, 97, 1, 0, 0, 0, 511, 512, 5, 5, 0, 0, 512, 513, 3, 100, 50, 0, 513, 99, 1, 0, 0, 0, 514, 515, 5, 69, 0, 0, 515, 516, 3, 2, 1, 0, 516, 517, 5, 70, 0, 0, 517, 101, 1, 0, 0, 0, 518, 519, 5, 17, 0, 0, 519, 520, 5, 106, 0, 0, 520, 103, 1, 0, 0, 0, 521, 522, 5, 12, 0, 0, 522, 523, 5, 110, 0, 0, 523, 105, 1, 0, 0, 0, 524, 525, 5, 3, 0, 0, 525, 528, 5, 90, 0, 0, 526, 527, 5, 88, 0, 0, 527, 529, 3, 50, 25, 0, 528, 526, 1, 0, 0, 0, 528, 529, 1, 0, 0, 0, 529, 539, 1, 0, 0, 0, 530, 531, 5, 89, 0, 0, 531, 536, 3, 108, 54, 0, 532, 533, 5, 38, 0, 0, 533, 535, 3, 108, 54, 
0, 534, 532, 1, 0, 0, 0, 535, 538, 1, 0, 0, 0, 536, 534, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 540, 1, 0, 0, 0, 538, 536, 1, 0, 0, 0, 539, 530, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 107, 1, 0, 0, 0, 541, 542, 3, 50, 25, 0, 542, 543, 5, 36, 0, 0, 543, 545, 1, 0, 0, 0, 544, 541, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 3, 50, 25, 0, 547, 109, 1, 0, 0, 0, 548, 549, 5, 11, 0, 0, 549, 550, 5, 25, 0, 0, 550, 551, 5, 88, 0, 0, 551, 552, 3, 52, 26, 0, 552, 111, 1, 0, 0, 0, 53, 123, 132, 148, 160, 169, 177, 181, 189, 191, 196, 203, 208, 215, 221, 229, 231, 242, 249, 260, 263, 277, 285, 293, 297, 303, 311, 324, 328, 332, 339, 343, 349, 356, 364, 372, 394, 405, 416, 421, 425, 436, 441, 445, 459, 470, 484, 495, 498, 503, 528, 536, 539, 544] \ No newline at end of file +[4, 1, 124, 567, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 126, 8, 1, 10, 1, 12, 1, 129, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 137, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 153, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 172, 8, 5, 10, 5, 12, 5, 175, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 182, 8, 5, 1, 5, 1, 5, 3, 5, 186, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 194, 8, 5, 10, 5, 12, 5, 197, 9, 5, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 208, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 220, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 226, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 234, 8, 8, 10, 8, 12, 8, 237, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 247, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 252, 8, 9, 10, 9, 12, 9, 255, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 263, 8, 10, 10, 10, 12, 10, 266, 9, 10, 3, 10, 268, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 280, 8, 13, 10, 13, 12, 13, 283, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 290, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 296, 8, 15, 10, 15, 12, 15, 299, 9, 15, 1, 15, 3, 15, 302, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 309, 8, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 317, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 323, 8, 20, 10, 20, 12, 20, 326, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 336, 8, 22, 10, 22, 12, 22, 339, 9, 22, 1, 22, 3, 22, 342, 8, 22, 1, 22, 1, 22, 3, 22, 346, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 353, 8, 24, 1, 24, 1, 24, 3, 24, 357, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 363, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 376, 8, 27, 10, 27, 12, 27, 379, 
9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 384, 8, 28, 10, 28, 12, 28, 387, 9, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 406, 8, 31, 10, 31, 12, 31, 409, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 428, 8, 31, 10, 31, 12, 31, 431, 9, 31, 1, 31, 1, 31, 3, 31, 435, 8, 31, 1, 32, 1, 32, 3, 32, 439, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 448, 8, 34, 10, 34, 12, 34, 451, 9, 34, 1, 35, 1, 35, 3, 35, 455, 8, 35, 1, 35, 1, 35, 3, 35, 459, 8, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 471, 8, 38, 10, 38, 12, 38, 474, 9, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 484, 8, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 5, 43, 496, 8, 43, 10, 43, 12, 43, 499, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 3, 46, 509, 8, 46, 1, 47, 3, 47, 512, 8, 47, 1, 47, 1, 47, 1, 48, 3, 48, 517, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 542, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 548, 8, 55, 10, 55, 12, 55, 551, 9, 55, 3, 55, 553, 8, 55, 1, 56, 1, 56, 1, 56, 3, 56, 558, 8, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 0, 4, 2, 10, 16, 18, 58, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 0, 8, 1, 0, 63, 64, 1, 0, 65, 67, 2, 0, 25, 25, 30, 30, 1, 0, 71, 72, 2, 0, 35, 35, 39, 39, 1, 0, 42, 43, 2, 0, 41, 41, 55, 55, 2, 0, 56, 56, 58, 62, 592, 0, 116, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 4, 136, 1, 0, 0, 0, 6, 152, 1, 0, 0, 0, 8, 154, 1, 0, 0, 0, 10, 185, 1, 0, 0, 0, 12, 212, 1, 0, 0, 0, 14, 219, 1, 0, 0, 0, 16, 225, 1, 0, 0, 0, 18, 246, 1, 0, 0, 0, 20, 256, 1, 0, 0, 0, 22, 271, 1, 0, 0, 0, 24, 273, 1, 0, 0, 0, 26, 276, 1, 0, 0, 0, 28, 289, 1, 0, 0, 0, 30, 291, 1, 0, 0, 0, 32, 308, 1, 0, 0, 0, 34, 310, 1, 0, 0, 0, 36, 312, 1, 0, 0, 0, 38, 316, 1, 0, 0, 0, 40, 318, 1, 0, 0, 0, 42, 327, 1, 0, 0, 0, 44, 331, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 350, 1, 0, 0, 0, 50, 358, 1, 0, 0, 0, 52, 364, 1, 0, 0, 0, 54, 372, 1, 0, 0, 0, 56, 380, 1, 0, 0, 0, 58, 388, 1, 0, 0, 0, 60, 390, 1, 0, 0, 0, 62, 434, 1, 0, 0, 0, 64, 438, 1, 0, 0, 0, 66, 440, 1, 0, 0, 0, 68, 443, 1, 0, 0, 0, 70, 452, 1, 0, 0, 0, 72, 460, 1, 0, 0, 0, 74, 463, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 475, 1, 0, 0, 0, 80, 479, 1, 0, 0, 0, 82, 485, 1, 0, 0, 0, 84, 489, 1, 0, 0, 0, 86, 492, 1, 0, 0, 0, 88, 500, 1, 0, 0, 0, 90, 504, 1, 0, 0, 0, 92, 508, 1, 0, 0, 0, 94, 511, 1, 0, 0, 0, 96, 516, 1, 0, 0, 0, 98, 520, 1, 0, 0, 0, 100, 522, 1, 0, 0, 0, 102, 524, 1, 0, 0, 0, 104, 527, 1, 0, 0, 0, 106, 531, 1, 0, 0, 0, 108, 534, 1, 0, 0, 0, 110, 537, 1, 0, 0, 0, 112, 557, 1, 0, 0, 0, 114, 561, 1, 0, 0, 0, 116, 117, 3, 2, 1, 0, 117, 118, 5, 0, 0, 1, 118, 1, 1, 0, 0, 0, 119, 120, 6, 1, -1, 0, 120, 121, 3, 4, 2, 0, 121, 127, 1, 0, 0, 0, 122, 123, 10, 1, 0, 0, 123, 124, 5, 29, 0, 0, 124, 126, 3, 6, 3, 0, 125, 122, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 3, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 130, 137, 3, 102, 51, 0, 131, 137, 3, 30, 15, 0, 132, 137, 3, 24, 12, 0, 133, 137, 3, 44, 22, 0, 134, 137, 3, 106, 53, 0, 
135, 137, 3, 108, 54, 0, 136, 130, 1, 0, 0, 0, 136, 131, 1, 0, 0, 0, 136, 132, 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 136, 135, 1, 0, 0, 0, 137, 5, 1, 0, 0, 0, 138, 153, 3, 46, 23, 0, 139, 153, 3, 50, 25, 0, 140, 153, 3, 66, 33, 0, 141, 153, 3, 114, 57, 0, 142, 153, 3, 72, 36, 0, 143, 153, 3, 68, 34, 0, 144, 153, 3, 48, 24, 0, 145, 153, 3, 8, 4, 0, 146, 153, 3, 74, 37, 0, 147, 153, 3, 76, 38, 0, 148, 153, 3, 80, 40, 0, 149, 153, 3, 82, 41, 0, 150, 153, 3, 110, 55, 0, 151, 153, 3, 84, 42, 0, 152, 138, 1, 0, 0, 0, 152, 139, 1, 0, 0, 0, 152, 140, 1, 0, 0, 0, 152, 141, 1, 0, 0, 0, 152, 142, 1, 0, 0, 0, 152, 143, 1, 0, 0, 0, 152, 144, 1, 0, 0, 0, 152, 145, 1, 0, 0, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 151, 1, 0, 0, 0, 153, 7, 1, 0, 0, 0, 154, 155, 5, 20, 0, 0, 155, 156, 3, 10, 5, 0, 156, 9, 1, 0, 0, 0, 157, 158, 6, 5, -1, 0, 158, 159, 5, 48, 0, 0, 159, 186, 3, 10, 5, 7, 160, 186, 3, 14, 7, 0, 161, 186, 3, 12, 6, 0, 162, 164, 3, 14, 7, 0, 163, 165, 5, 48, 0, 0, 164, 163, 1, 0, 0, 0, 164, 165, 1, 0, 0, 0, 165, 166, 1, 0, 0, 0, 166, 167, 5, 45, 0, 0, 167, 168, 5, 44, 0, 0, 168, 173, 3, 14, 7, 0, 169, 170, 5, 38, 0, 0, 170, 172, 3, 14, 7, 0, 171, 169, 1, 0, 0, 0, 172, 175, 1, 0, 0, 0, 173, 171, 1, 0, 0, 0, 173, 174, 1, 0, 0, 0, 174, 176, 1, 0, 0, 0, 175, 173, 1, 0, 0, 0, 176, 177, 5, 54, 0, 0, 177, 186, 1, 0, 0, 0, 178, 179, 3, 14, 7, 0, 179, 181, 5, 46, 0, 0, 180, 182, 5, 48, 0, 0, 181, 180, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 5, 49, 0, 0, 184, 186, 1, 0, 0, 0, 185, 157, 1, 0, 0, 0, 185, 160, 1, 0, 0, 0, 185, 161, 1, 0, 0, 0, 185, 162, 1, 0, 0, 0, 185, 178, 1, 0, 0, 0, 186, 195, 1, 0, 0, 0, 187, 188, 10, 4, 0, 0, 188, 189, 5, 34, 0, 0, 189, 194, 3, 10, 5, 5, 190, 191, 10, 3, 0, 0, 191, 192, 5, 51, 0, 0, 192, 194, 3, 10, 5, 4, 193, 187, 1, 0, 0, 0, 193, 190, 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 11, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 200, 3, 14, 7, 0, 199, 201, 5, 48, 0, 0, 200, 199, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 5, 47, 0, 0, 203, 204, 3, 98, 49, 0, 204, 213, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 208, 5, 48, 0, 0, 207, 206, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 5, 53, 0, 0, 210, 211, 3, 98, 49, 0, 211, 213, 1, 0, 0, 0, 212, 198, 1, 0, 0, 0, 212, 205, 1, 0, 0, 0, 213, 13, 1, 0, 0, 0, 214, 220, 3, 16, 8, 0, 215, 216, 3, 16, 8, 0, 216, 217, 3, 100, 50, 0, 217, 218, 3, 16, 8, 0, 218, 220, 1, 0, 0, 0, 219, 214, 1, 0, 0, 0, 219, 215, 1, 0, 0, 0, 220, 15, 1, 0, 0, 0, 221, 222, 6, 8, -1, 0, 222, 226, 3, 18, 9, 0, 223, 224, 7, 0, 0, 0, 224, 226, 3, 16, 8, 3, 225, 221, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 226, 235, 1, 0, 0, 0, 227, 228, 10, 2, 0, 0, 228, 229, 7, 1, 0, 0, 229, 234, 3, 16, 8, 3, 230, 231, 10, 1, 0, 0, 231, 232, 7, 0, 0, 0, 232, 234, 3, 16, 8, 2, 233, 227, 1, 0, 0, 0, 233, 230, 1, 0, 0, 0, 234, 237, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, 235, 236, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 238, 239, 6, 9, -1, 0, 239, 247, 3, 62, 31, 0, 240, 247, 3, 52, 26, 0, 241, 247, 3, 20, 10, 0, 242, 243, 5, 44, 0, 0, 243, 244, 3, 10, 5, 0, 244, 245, 5, 54, 0, 0, 245, 247, 1, 0, 0, 0, 246, 238, 1, 0, 0, 0, 246, 240, 1, 0, 0, 0, 246, 241, 1, 0, 0, 0, 246, 242, 1, 0, 0, 0, 247, 253, 1, 0, 0, 0, 248, 249, 10, 1, 0, 0, 249, 250, 5, 37, 0, 0, 250, 252, 3, 22, 11, 0, 251, 248, 1, 0, 0, 0, 252, 255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 19, 1, 0, 0, 
0, 255, 253, 1, 0, 0, 0, 256, 257, 3, 58, 29, 0, 257, 267, 5, 44, 0, 0, 258, 268, 5, 65, 0, 0, 259, 264, 3, 10, 5, 0, 260, 261, 5, 38, 0, 0, 261, 263, 3, 10, 5, 0, 262, 260, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 268, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 258, 1, 0, 0, 0, 267, 259, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 270, 5, 54, 0, 0, 270, 21, 1, 0, 0, 0, 271, 272, 3, 58, 29, 0, 272, 23, 1, 0, 0, 0, 273, 274, 5, 16, 0, 0, 274, 275, 3, 26, 13, 0, 275, 25, 1, 0, 0, 0, 276, 281, 3, 28, 14, 0, 277, 278, 5, 38, 0, 0, 278, 280, 3, 28, 14, 0, 279, 277, 1, 0, 0, 0, 280, 283, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 27, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 284, 290, 3, 10, 5, 0, 285, 286, 3, 52, 26, 0, 286, 287, 5, 36, 0, 0, 287, 288, 3, 10, 5, 0, 288, 290, 1, 0, 0, 0, 289, 284, 1, 0, 0, 0, 289, 285, 1, 0, 0, 0, 290, 29, 1, 0, 0, 0, 291, 292, 5, 6, 0, 0, 292, 297, 3, 32, 16, 0, 293, 294, 5, 38, 0, 0, 294, 296, 3, 32, 16, 0, 295, 293, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 301, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 302, 3, 38, 19, 0, 301, 300, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 31, 1, 0, 0, 0, 303, 304, 3, 34, 17, 0, 304, 305, 5, 114, 0, 0, 305, 306, 3, 36, 18, 0, 306, 309, 1, 0, 0, 0, 307, 309, 3, 36, 18, 0, 308, 303, 1, 0, 0, 0, 308, 307, 1, 0, 0, 0, 309, 33, 1, 0, 0, 0, 310, 311, 5, 25, 0, 0, 311, 35, 1, 0, 0, 0, 312, 313, 7, 2, 0, 0, 313, 37, 1, 0, 0, 0, 314, 317, 3, 40, 20, 0, 315, 317, 3, 42, 21, 0, 316, 314, 1, 0, 0, 0, 316, 315, 1, 0, 0, 0, 317, 39, 1, 0, 0, 0, 318, 319, 5, 76, 0, 0, 319, 324, 5, 25, 0, 0, 320, 321, 5, 38, 0, 0, 321, 323, 5, 25, 0, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 41, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 328, 5, 69, 0, 0, 328, 329, 3, 40, 20, 0, 329, 330, 5, 70, 0, 0, 330, 43, 1, 0, 0, 0, 331, 332, 5, 13, 0, 0, 332, 337, 3, 32, 16, 0, 333, 334, 5, 38, 0, 0, 334, 336, 3, 32, 16, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 342, 3, 26, 13, 0, 341, 340, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 345, 1, 0, 0, 0, 343, 344, 5, 33, 0, 0, 344, 346, 3, 26, 13, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 45, 1, 0, 0, 0, 347, 348, 5, 4, 0, 0, 348, 349, 3, 26, 13, 0, 349, 47, 1, 0, 0, 0, 350, 352, 5, 19, 0, 0, 351, 353, 3, 26, 13, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 355, 5, 33, 0, 0, 355, 357, 3, 26, 13, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 49, 1, 0, 0, 0, 358, 359, 5, 8, 0, 0, 359, 362, 3, 26, 13, 0, 360, 361, 5, 33, 0, 0, 361, 363, 3, 26, 13, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 51, 1, 0, 0, 0, 364, 369, 3, 58, 29, 0, 365, 366, 5, 40, 0, 0, 366, 368, 3, 58, 29, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 53, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 377, 3, 60, 30, 0, 373, 374, 5, 40, 0, 0, 374, 376, 3, 60, 30, 0, 375, 373, 1, 0, 0, 0, 376, 379, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 377, 378, 1, 0, 0, 0, 378, 55, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 380, 385, 3, 54, 27, 0, 381, 382, 5, 38, 0, 0, 382, 384, 3, 54, 27, 0, 383, 381, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 57, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 388, 389, 7, 3, 0, 0, 389, 59, 1, 0, 0, 0, 390, 391, 5, 80, 0, 0, 391, 61, 1, 0, 0, 0, 392, 435, 5, 49, 0, 0, 393, 394, 3, 96, 48, 0, 
394, 395, 5, 71, 0, 0, 395, 435, 1, 0, 0, 0, 396, 435, 3, 94, 47, 0, 397, 435, 3, 96, 48, 0, 398, 435, 3, 90, 45, 0, 399, 435, 3, 64, 32, 0, 400, 435, 3, 98, 49, 0, 401, 402, 5, 69, 0, 0, 402, 407, 3, 92, 46, 0, 403, 404, 5, 38, 0, 0, 404, 406, 3, 92, 46, 0, 405, 403, 1, 0, 0, 0, 406, 409, 1, 0, 0, 0, 407, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 410, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 410, 411, 5, 70, 0, 0, 411, 435, 1, 0, 0, 0, 412, 413, 5, 69, 0, 0, 413, 418, 3, 90, 45, 0, 414, 415, 5, 38, 0, 0, 415, 417, 3, 90, 45, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 421, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 422, 5, 70, 0, 0, 422, 435, 1, 0, 0, 0, 423, 424, 5, 69, 0, 0, 424, 429, 3, 98, 49, 0, 425, 426, 5, 38, 0, 0, 426, 428, 3, 98, 49, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 432, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 433, 5, 70, 0, 0, 433, 435, 1, 0, 0, 0, 434, 392, 1, 0, 0, 0, 434, 393, 1, 0, 0, 0, 434, 396, 1, 0, 0, 0, 434, 397, 1, 0, 0, 0, 434, 398, 1, 0, 0, 0, 434, 399, 1, 0, 0, 0, 434, 400, 1, 0, 0, 0, 434, 401, 1, 0, 0, 0, 434, 412, 1, 0, 0, 0, 434, 423, 1, 0, 0, 0, 435, 63, 1, 0, 0, 0, 436, 439, 5, 52, 0, 0, 437, 439, 5, 68, 0, 0, 438, 436, 1, 0, 0, 0, 438, 437, 1, 0, 0, 0, 439, 65, 1, 0, 0, 0, 440, 441, 5, 10, 0, 0, 441, 442, 5, 31, 0, 0, 442, 67, 1, 0, 0, 0, 443, 444, 5, 18, 0, 0, 444, 449, 3, 70, 35, 0, 445, 446, 5, 38, 0, 0, 446, 448, 3, 70, 35, 0, 447, 445, 1, 0, 0, 0, 448, 451, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 449, 450, 1, 0, 0, 0, 450, 69, 1, 0, 0, 0, 451, 449, 1, 0, 0, 0, 452, 454, 3, 10, 5, 0, 453, 455, 7, 4, 0, 0, 454, 453, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 458, 1, 0, 0, 0, 456, 457, 5, 50, 0, 0, 457, 459, 7, 5, 0, 0, 458, 456, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, 459, 71, 1, 0, 0, 0, 460, 461, 5, 9, 0, 0, 461, 462, 3, 56, 28, 0, 462, 73, 1, 0, 0, 0, 463, 464, 5, 2, 0, 0, 464, 465, 3, 56, 28, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 15, 0, 0, 467, 472, 3, 78, 39, 0, 468, 469, 5, 38, 0, 0, 469, 471, 3, 78, 39, 0, 470, 468, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 77, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 476, 3, 54, 27, 0, 476, 477, 5, 84, 0, 0, 477, 478, 3, 54, 27, 0, 478, 79, 1, 0, 0, 0, 479, 480, 5, 1, 0, 0, 480, 481, 3, 18, 9, 0, 481, 483, 3, 98, 49, 0, 482, 484, 3, 86, 43, 0, 483, 482, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 81, 1, 0, 0, 0, 485, 486, 5, 7, 0, 0, 486, 487, 3, 18, 9, 0, 487, 488, 3, 98, 49, 0, 488, 83, 1, 0, 0, 0, 489, 490, 5, 14, 0, 0, 490, 491, 3, 52, 26, 0, 491, 85, 1, 0, 0, 0, 492, 497, 3, 88, 44, 0, 493, 494, 5, 38, 0, 0, 494, 496, 3, 88, 44, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 87, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 501, 3, 58, 29, 0, 501, 502, 5, 36, 0, 0, 502, 503, 3, 62, 31, 0, 503, 89, 1, 0, 0, 0, 504, 505, 7, 6, 0, 0, 505, 91, 1, 0, 0, 0, 506, 509, 3, 94, 47, 0, 507, 509, 3, 96, 48, 0, 508, 506, 1, 0, 0, 0, 508, 507, 1, 0, 0, 0, 509, 93, 1, 0, 0, 0, 510, 512, 7, 0, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 5, 32, 0, 0, 514, 95, 1, 0, 0, 0, 515, 517, 7, 0, 0, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 5, 31, 0, 0, 519, 97, 1, 0, 0, 0, 520, 521, 5, 30, 0, 0, 521, 99, 1, 0, 0, 0, 522, 523, 7, 7, 0, 0, 523, 101, 1, 0, 0, 0, 524, 525, 5, 5, 0, 0, 525, 526, 3, 104, 52, 0, 526, 103, 1, 0, 0, 0, 527, 528, 5, 69, 0, 0, 528, 529, 3, 2, 1, 0, 529, 530, 5, 70, 0, 0, 530, 
105, 1, 0, 0, 0, 531, 532, 5, 17, 0, 0, 532, 533, 5, 106, 0, 0, 533, 107, 1, 0, 0, 0, 534, 535, 5, 12, 0, 0, 535, 536, 5, 110, 0, 0, 536, 109, 1, 0, 0, 0, 537, 538, 5, 3, 0, 0, 538, 541, 5, 90, 0, 0, 539, 540, 5, 88, 0, 0, 540, 542, 3, 54, 27, 0, 541, 539, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 552, 1, 0, 0, 0, 543, 544, 5, 89, 0, 0, 544, 549, 3, 112, 56, 0, 545, 546, 5, 38, 0, 0, 546, 548, 3, 112, 56, 0, 547, 545, 1, 0, 0, 0, 548, 551, 1, 0, 0, 0, 549, 547, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 553, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 552, 543, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 111, 1, 0, 0, 0, 554, 555, 3, 54, 27, 0, 555, 556, 5, 36, 0, 0, 556, 558, 1, 0, 0, 0, 557, 554, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 3, 54, 27, 0, 560, 113, 1, 0, 0, 0, 561, 562, 5, 11, 0, 0, 562, 563, 3, 32, 16, 0, 563, 564, 5, 88, 0, 0, 564, 565, 3, 56, 28, 0, 565, 115, 1, 0, 0, 0, 54, 127, 136, 152, 164, 173, 181, 185, 193, 195, 200, 207, 212, 219, 225, 233, 235, 246, 253, 264, 267, 281, 289, 297, 301, 308, 316, 324, 337, 341, 345, 352, 356, 362, 369, 377, 385, 407, 418, 429, 434, 438, 449, 454, 458, 472, 483, 497, 508, 511, 516, 541, 549, 552, 557] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 6c21529d6a648..afaf57ba1d218 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -20,62 +20,62 @@ public class EsqlBaseParser extends Parser { DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, KEEP=9, LIMIT=10, LOOKUP=11, META=12, METRICS=13, MV_EXPAND=14, RENAME=15, ROW=16, SHOW=17, SORT=18, STATS=19, WHERE=20, UNKNOWN_CMD=21, LINE_COMMENT=22, - MULTILINE_COMMENT=23, WS=24, INDEX_UNQUOTED_IDENTIFIER=25, EXPLAIN_WS=26, - EXPLAIN_LINE_COMMENT=27, EXPLAIN_MULTILINE_COMMENT=28, PIPE=29, QUOTED_STRING=30, - INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, - CAST_OP=37, COMMA=38, DESC=39, DOT=40, FALSE=41, FIRST=42, LAST=43, LP=44, - IN=45, IS=46, LIKE=47, NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, RLIKE=53, - RP=54, TRUE=55, EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, GTE=62, - PLUS=63, MINUS=64, ASTERISK=65, SLASH=66, PERCENT=67, NAMED_OR_POSITIONAL_PARAM=68, - OPENING_BRACKET=69, CLOSING_BRACKET=70, UNQUOTED_IDENTIFIER=71, QUOTED_IDENTIFIER=72, - EXPR_LINE_COMMENT=73, EXPR_MULTILINE_COMMENT=74, EXPR_WS=75, METADATA=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, LOOKUP_LINE_COMMENT=97, LOOKUP_MULTILINE_COMMENT=98, - LOOKUP_WS=99, LOOKUP_FIELD_LINE_COMMENT=100, LOOKUP_FIELD_MULTILINE_COMMENT=101, - LOOKUP_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103, MVEXPAND_MULTILINE_COMMENT=104, - MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107, SHOW_MULTILINE_COMMENT=108, - SHOW_WS=109, FUNCTIONS=110, META_LINE_COMMENT=111, META_MULTILINE_COMMENT=112, - META_WS=113, COLON=114, SETTING=115, SETTING_LINE_COMMENT=116, SETTTING_MULTILINE_COMMENT=117, - 
SETTING_WS=118, METRICS_LINE_COMMENT=119, METRICS_MULTILINE_COMMENT=120, - METRICS_WS=121, CLOSING_METRICS_LINE_COMMENT=122, CLOSING_METRICS_MULTILINE_COMMENT=123, - CLOSING_METRICS_WS=124; + MULTILINE_COMMENT=23, WS=24, UNQUOTED_SOURCE=25, EXPLAIN_WS=26, EXPLAIN_LINE_COMMENT=27, + EXPLAIN_MULTILINE_COMMENT=28, PIPE=29, QUOTED_STRING=30, INTEGER_LITERAL=31, + DECIMAL_LITERAL=32, BY=33, AND=34, ASC=35, ASSIGN=36, CAST_OP=37, COMMA=38, + DESC=39, DOT=40, FALSE=41, FIRST=42, LAST=43, LP=44, IN=45, IS=46, LIKE=47, + NOT=48, NULL=49, NULLS=50, OR=51, PARAM=52, RLIKE=53, RP=54, TRUE=55, + EQ=56, CIEQ=57, NEQ=58, LT=59, LTE=60, GT=61, GTE=62, PLUS=63, MINUS=64, + ASTERISK=65, SLASH=66, PERCENT=67, NAMED_OR_POSITIONAL_PARAM=68, OPENING_BRACKET=69, + CLOSING_BRACKET=70, UNQUOTED_IDENTIFIER=71, QUOTED_IDENTIFIER=72, EXPR_LINE_COMMENT=73, + EXPR_MULTILINE_COMMENT=74, EXPR_WS=75, METADATA=76, FROM_LINE_COMMENT=77, + FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, PROJECT_LINE_COMMENT=81, + PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, AS=84, RENAME_LINE_COMMENT=85, + RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, ON=88, WITH=89, ENRICH_POLICY_NAME=90, + ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, + ENRICH_FIELD_MULTILINE_COMMENT=95, ENRICH_FIELD_WS=96, LOOKUP_LINE_COMMENT=97, + LOOKUP_MULTILINE_COMMENT=98, LOOKUP_WS=99, LOOKUP_FIELD_LINE_COMMENT=100, + LOOKUP_FIELD_MULTILINE_COMMENT=101, LOOKUP_FIELD_WS=102, MVEXPAND_LINE_COMMENT=103, + MVEXPAND_MULTILINE_COMMENT=104, MVEXPAND_WS=105, INFO=106, SHOW_LINE_COMMENT=107, + SHOW_MULTILINE_COMMENT=108, SHOW_WS=109, FUNCTIONS=110, META_LINE_COMMENT=111, + META_MULTILINE_COMMENT=112, META_WS=113, COLON=114, SETTING=115, SETTING_LINE_COMMENT=116, + SETTTING_MULTILINE_COMMENT=117, SETTING_WS=118, METRICS_LINE_COMMENT=119, + METRICS_MULTILINE_COMMENT=120, METRICS_WS=121, CLOSING_METRICS_LINE_COMMENT=122, + CLOSING_METRICS_MULTILINE_COMMENT=123, CLOSING_METRICS_WS=124; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, RULE_functionExpression = 10, RULE_dataType = 11, RULE_rowCommand = 12, - RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_indexIdentifier = 16, - RULE_metadata = 17, RULE_metadataOption = 18, RULE_deprecated_metadata = 19, - RULE_metricsCommand = 20, RULE_evalCommand = 21, RULE_statsCommand = 22, - RULE_inlinestatsCommand = 23, RULE_qualifiedName = 24, RULE_qualifiedNamePattern = 25, - RULE_qualifiedNamePatterns = 26, RULE_identifier = 27, RULE_identifierPattern = 28, - RULE_constant = 29, RULE_params = 30, RULE_limitCommand = 31, RULE_sortCommand = 32, - RULE_orderExpression = 33, RULE_keepCommand = 34, RULE_dropCommand = 35, - RULE_renameCommand = 36, RULE_renameClause = 37, RULE_dissectCommand = 38, - RULE_grokCommand = 39, RULE_mvExpandCommand = 40, RULE_commandOptions = 41, - RULE_commandOption = 42, RULE_booleanValue = 43, RULE_numericValue = 44, - RULE_decimalValue = 45, RULE_integerValue = 46, RULE_string = 47, RULE_comparisonOperator = 48, - RULE_explainCommand = 49, RULE_subqueryExpression = 50, RULE_showCommand = 51, - RULE_metaCommand = 52, RULE_enrichCommand = 53, RULE_enrichWithClause = 54, - RULE_lookupCommand = 55; + RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_indexPattern = 16, + RULE_clusterString = 17, 
RULE_indexString = 18, RULE_metadata = 19, RULE_metadataOption = 20, + RULE_deprecated_metadata = 21, RULE_metricsCommand = 22, RULE_evalCommand = 23, + RULE_statsCommand = 24, RULE_inlinestatsCommand = 25, RULE_qualifiedName = 26, + RULE_qualifiedNamePattern = 27, RULE_qualifiedNamePatterns = 28, RULE_identifier = 29, + RULE_identifierPattern = 30, RULE_constant = 31, RULE_params = 32, RULE_limitCommand = 33, + RULE_sortCommand = 34, RULE_orderExpression = 35, RULE_keepCommand = 36, + RULE_dropCommand = 37, RULE_renameCommand = 38, RULE_renameClause = 39, + RULE_dissectCommand = 40, RULE_grokCommand = 41, RULE_mvExpandCommand = 42, + RULE_commandOptions = 43, RULE_commandOption = 44, RULE_booleanValue = 45, + RULE_numericValue = 46, RULE_decimalValue = 47, RULE_integerValue = 48, + RULE_string = 49, RULE_comparisonOperator = 50, RULE_explainCommand = 51, + RULE_subqueryExpression = 52, RULE_showCommand = 53, RULE_metaCommand = 54, + RULE_enrichCommand = 55, RULE_enrichWithClause = 56, RULE_lookupCommand = 57; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "dataType", "rowCommand", - "fields", "field", "fromCommand", "indexIdentifier", "metadata", "metadataOption", - "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand", - "inlinestatsCommand", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", - "identifier", "identifierPattern", "constant", "params", "limitCommand", - "sortCommand", "orderExpression", "keepCommand", "dropCommand", "renameCommand", - "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", - "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", - "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "metaCommand", "enrichCommand", "enrichWithClause", "lookupCommand" + "fields", "field", "fromCommand", "indexPattern", "clusterString", "indexString", + "metadata", "metadataOption", "deprecated_metadata", "metricsCommand", + "evalCommand", "statsCommand", "inlinestatsCommand", "qualifiedName", + "qualifiedNamePattern", "qualifiedNamePatterns", "identifier", "identifierPattern", + "constant", "params", "limitCommand", "sortCommand", "orderExpression", + "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", + "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", + "booleanValue", "numericValue", "decimalValue", "integerValue", "string", + "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", + "metaCommand", "enrichCommand", "enrichWithClause", "lookupCommand" }; } public static final String[] ruleNames = makeRuleNames(); @@ -103,11 +103,11 @@ private static String[] makeSymbolicNames() { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS", "KEEP", "LIMIT", "LOOKUP", "META", "METRICS", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", - "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS", - "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", - "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", 
"RP", "TRUE", "EQ", "CIEQ", + "MULTILINE_COMMENT", "WS", "UNQUOTED_SOURCE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", + "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", + "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", @@ -211,9 +211,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(112); + setState(116); query(0); - setState(113); + setState(117); match(EOF); } } @@ -309,11 +309,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(116); + setState(120); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(123); + setState(127); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -324,16 +324,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(118); + setState(122); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(119); + setState(123); match(PIPE); - setState(120); + setState(124); processingCommand(); } } } - setState(125); + setState(129); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -394,48 +394,48 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(132); + setState(136); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(126); + setState(130); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(127); + setState(131); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(128); + setState(132); rowCommand(); } break; case METRICS: enterOuterAlt(_localctx, 4); { - setState(129); + setState(133); metricsCommand(); } break; case SHOW: enterOuterAlt(_localctx, 5); { - setState(130); + setState(134); showCommand(); } break; case META: enterOuterAlt(_localctx, 6); { - setState(131); + setState(135); metaCommand(); } break; @@ -522,104 +522,104 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(148); + setState(152); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(134); + setState(138); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(135); + setState(139); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(136); + setState(140); limitCommand(); } break; case LOOKUP: enterOuterAlt(_localctx, 4); { - setState(137); + setState(141); lookupCommand(); } break; case KEEP: 
enterOuterAlt(_localctx, 5); { - setState(138); + setState(142); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 6); { - setState(139); + setState(143); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 7); { - setState(140); + setState(144); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 8); { - setState(141); + setState(145); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 9); { - setState(142); + setState(146); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 10); { - setState(143); + setState(147); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 11); { - setState(144); + setState(148); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 12); { - setState(145); + setState(149); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 13); { - setState(146); + setState(150); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 14); { - setState(147); + setState(151); mvExpandCommand(); } break; @@ -670,9 +670,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(150); + setState(154); match(WHERE); - setState(151); + setState(155); booleanExpression(0); } } @@ -867,7 +867,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(181); + setState(185); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -876,9 +876,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(154); + setState(158); match(NOT); - setState(155); + setState(159); booleanExpression(7); } break; @@ -887,7 +887,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(156); + setState(160); valueExpression(); } break; @@ -896,7 +896,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(157); + setState(161); regexBooleanExpression(); } break; @@ -905,41 +905,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(158); + setState(162); valueExpression(); - setState(160); + setState(164); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(159); + setState(163); match(NOT); } } - setState(162); + setState(166); match(IN); - setState(163); + setState(167); match(LP); - setState(164); + setState(168); valueExpression(); - setState(169); + setState(173); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(165); + setState(169); match(COMMA); - setState(166); + setState(170); valueExpression(); } } - setState(171); + setState(175); _errHandler.sync(this); _la = _input.LA(1); } - setState(172); + setState(176); match(RP); } break; @@ -948,27 +948,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(174); + setState(178); valueExpression(); - setState(175); + setState(179); match(IS); - setState(177); + setState(181); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - 
setState(176); + setState(180); match(NOT); } } - setState(179); + setState(183); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(191); + setState(195); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -976,7 +976,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(189); + setState(193); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -984,11 +984,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(183); + setState(187); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(184); + setState(188); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(185); + setState(189); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -997,18 +997,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(186); + setState(190); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(187); + setState(191); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(188); + setState(192); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(193); + setState(197); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1063,48 +1063,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(208); + setState(212); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(194); + setState(198); valueExpression(); - setState(196); + setState(200); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(195); + setState(199); match(NOT); } } - setState(198); + setState(202); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(199); + setState(203); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(201); + setState(205); valueExpression(); - setState(203); + setState(207); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(202); + setState(206); match(NOT); } } - setState(205); + setState(209); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(206); + setState(210); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1190,14 +1190,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(215); + setState(219); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(210); + setState(214); operatorExpression(0); } break; @@ -1205,11 +1205,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(211); + setState(215); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(212); + setState(216); comparisonOperator(); - setState(213); + setState(217); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1334,7 +1334,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(221); + setState(225); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1343,7 +1343,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(218); + setState(222); primaryExpression(0); } break; @@ -1352,7 +1352,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(219); + setState(223); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1363,13 +1363,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(220); + setState(224); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(231); + setState(235); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1377,7 +1377,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(229); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1385,9 +1385,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(223); + setState(227); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(224); + setState(228); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & 7L) != 0)) ) { @@ -1398,7 +1398,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(225); + setState(229); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1407,9 +1407,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(226); + setState(230); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, 
"precpred(_ctx, 1)"); - setState(227); + setState(231); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1420,14 +1420,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(228); + setState(232); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(233); + setState(237); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1585,7 +1585,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(242); + setState(246); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1594,7 +1594,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(235); + setState(239); constant(); } break; @@ -1603,7 +1603,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(236); + setState(240); qualifiedName(); } break; @@ -1612,7 +1612,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(237); + setState(241); functionExpression(); } break; @@ -1621,17 +1621,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(238); + setState(242); match(LP); - setState(239); + setState(243); booleanExpression(0); - setState(240); + setState(244); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(249); + setState(253); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1642,16 +1642,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(244); + setState(248); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(245); + setState(249); match(CAST_OP); - setState(246); + setState(250); dataType(); } } } - setState(251); + setState(255); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1713,16 +1713,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(252); + setState(256); identifier(); - setState(253); + setState(257); match(LP); - setState(263); + setState(267); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(254); + setState(258); match(ASTERISK); } break; @@ -1743,21 +1743,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(255); + setState(259); booleanExpression(0); - setState(260); + setState(264); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(256); + setState(260); match(COMMA); - setState(257); + setState(261); booleanExpression(0); } } - setState(262); + setState(266); 
_errHandler.sync(this); _la = _input.LA(1); } @@ -1769,7 +1769,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(265); + setState(269); match(RP); } } @@ -1827,7 +1827,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(267); + setState(271); identifier(); } } @@ -1874,9 +1874,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(269); + setState(273); match(ROW); - setState(270); + setState(274); fields(); } } @@ -1930,23 +1930,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(272); + setState(276); field(); - setState(277); + setState(281); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(273); + setState(277); match(COMMA); - setState(274); + setState(278); field(); } } } - setState(279); + setState(283); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -1996,24 +1996,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 28, RULE_field); try { - setState(285); + setState(289); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(280); + setState(284); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(281); + setState(285); qualifiedName(); - setState(282); + setState(286); match(ASSIGN); - setState(283); + setState(287); booleanExpression(0); } break; @@ -2033,11 +2033,11 @@ public final FieldContext field() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class FromCommandContext extends ParserRuleContext { public TerminalNode FROM() { return getToken(EsqlBaseParser.FROM, 0); } - public List indexIdentifier() { - return getRuleContexts(IndexIdentifierContext.class); + public List indexPattern() { + return getRuleContexts(IndexPatternContext.class); } - public IndexIdentifierContext indexIdentifier(int i) { - return getRuleContext(IndexIdentifierContext.class,i); + public IndexPatternContext indexPattern(int i) { + return getRuleContext(IndexPatternContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2073,34 +2073,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(287); + setState(291); match(FROM); - setState(288); - indexIdentifier(); - setState(293); + setState(292); + indexPattern(); + setState(297); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(289); + setState(293); match(COMMA); - setState(290); - indexIdentifier(); + setState(294); + indexPattern(); } } } - setState(295); + setState(299); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(297); + setState(301); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(296); + setState(300); metadata(); } break; @@ 
-2119,36 +2119,157 @@ public final FromCommandContext fromCommand() throws RecognitionException { } @SuppressWarnings("CheckReturnValue") - public static class IndexIdentifierContext extends ParserRuleContext { - public TerminalNode INDEX_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.INDEX_UNQUOTED_IDENTIFIER, 0); } + public static class IndexPatternContext extends ParserRuleContext { + public ClusterStringContext clusterString() { + return getRuleContext(ClusterStringContext.class,0); + } + public TerminalNode COLON() { return getToken(EsqlBaseParser.COLON, 0); } + public IndexStringContext indexString() { + return getRuleContext(IndexStringContext.class,0); + } @SuppressWarnings("this-escape") - public IndexIdentifierContext(ParserRuleContext parent, int invokingState) { + public IndexPatternContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_indexIdentifier; } + @Override public int getRuleIndex() { return RULE_indexPattern; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIndexIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIndexPattern(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitIndexIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitIndexPattern(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitIndexIdentifier(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitIndexPattern(this); else return visitor.visitChildren(this); } } - public final IndexIdentifierContext indexIdentifier() throws RecognitionException { - IndexIdentifierContext _localctx = new IndexIdentifierContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_indexIdentifier); + public final IndexPatternContext indexPattern() throws RecognitionException { + IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); + enterRule(_localctx, 32, RULE_indexPattern); + try { + setState(308); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(303); + clusterString(); + setState(304); + match(COLON); + setState(305); + indexString(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(307); + indexString(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class ClusterStringContext extends ParserRuleContext { + public TerminalNode UNQUOTED_SOURCE() { return getToken(EsqlBaseParser.UNQUOTED_SOURCE, 0); } + @SuppressWarnings("this-escape") + public ClusterStringContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_clusterString; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterClusterString(this); + } + 
@Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitClusterString(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitClusterString(this); + else return visitor.visitChildren(this); + } + } + + public final ClusterStringContext clusterString() throws RecognitionException { + ClusterStringContext _localctx = new ClusterStringContext(_ctx, getState()); + enterRule(_localctx, 34, RULE_clusterString); try { enterOuterAlt(_localctx, 1); { - setState(299); - match(INDEX_UNQUOTED_IDENTIFIER); + setState(310); + match(UNQUOTED_SOURCE); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class IndexStringContext extends ParserRuleContext { + public TerminalNode UNQUOTED_SOURCE() { return getToken(EsqlBaseParser.UNQUOTED_SOURCE, 0); } + public TerminalNode QUOTED_STRING() { return getToken(EsqlBaseParser.QUOTED_STRING, 0); } + @SuppressWarnings("this-escape") + public IndexStringContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_indexString; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIndexString(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitIndexString(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitIndexString(this); + else return visitor.visitChildren(this); + } + } + + public final IndexStringContext indexString() throws RecognitionException { + IndexStringContext _localctx = new IndexStringContext(_ctx, getState()); + enterRule(_localctx, 36, RULE_indexString); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(312); + _la = _input.LA(1); + if ( !(_la==UNQUOTED_SOURCE || _la==QUOTED_STRING) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } } } catch (RecognitionException re) { @@ -2192,22 +2313,22 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_metadata); + enterRule(_localctx, 38, RULE_metadata); try { - setState(303); + setState(316); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(301); + setState(314); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(302); + setState(315); deprecated_metadata(); } break; @@ -2229,11 +2350,9 @@ public final MetadataContext metadata() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class MetadataOptionContext extends ParserRuleContext { public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); } - public List indexIdentifier() { - return 
getRuleContexts(IndexIdentifierContext.class); - } - public IndexIdentifierContext indexIdentifier(int i) { - return getRuleContext(IndexIdentifierContext.class,i); + public List UNQUOTED_SOURCE() { return getTokens(EsqlBaseParser.UNQUOTED_SOURCE); } + public TerminalNode UNQUOTED_SOURCE(int i) { + return getToken(EsqlBaseParser.UNQUOTED_SOURCE, i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2261,32 +2380,32 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataOptionContext metadataOption() throws RecognitionException { MetadataOptionContext _localctx = new MetadataOptionContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_metadataOption); + enterRule(_localctx, 40, RULE_metadataOption); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(305); + setState(318); match(METADATA); - setState(306); - indexIdentifier(); - setState(311); + setState(319); + match(UNQUOTED_SOURCE); + setState(324); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,25,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(307); + setState(320); match(COMMA); - setState(308); - indexIdentifier(); + setState(321); + match(UNQUOTED_SOURCE); } } } - setState(313); + setState(326); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,25,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } } } @@ -2329,15 +2448,15 @@ public T accept(ParseTreeVisitor visitor) { public final Deprecated_metadataContext deprecated_metadata() throws RecognitionException { Deprecated_metadataContext _localctx = new Deprecated_metadataContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_deprecated_metadata); + enterRule(_localctx, 42, RULE_deprecated_metadata); try { enterOuterAlt(_localctx, 1); { - setState(314); + setState(327); match(OPENING_BRACKET); - setState(315); + setState(328); metadataOption(); - setState(316); + setState(329); match(CLOSING_BRACKET); } } @@ -2357,11 +2476,11 @@ public static class MetricsCommandContext extends ParserRuleContext { public FieldsContext aggregates; public FieldsContext grouping; public TerminalNode METRICS() { return getToken(EsqlBaseParser.METRICS, 0); } - public List indexIdentifier() { - return getRuleContexts(IndexIdentifierContext.class); + public List indexPattern() { + return getRuleContexts(IndexPatternContext.class); } - public IndexIdentifierContext indexIdentifier(int i) { - return getRuleContext(IndexIdentifierContext.class,i); + public IndexPatternContext indexPattern(int i) { + return getRuleContext(IndexPatternContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2396,51 +2515,51 @@ public T accept(ParseTreeVisitor visitor) { public final MetricsCommandContext metricsCommand() throws RecognitionException { MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_metricsCommand); + enterRule(_localctx, 44, RULE_metricsCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(318); + setState(331); match(METRICS); - setState(319); - indexIdentifier(); - setState(324); + setState(332); + indexPattern(); + setState(337); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(320); + setState(333); match(COMMA); - setState(321); - indexIdentifier(); + setState(334); + indexPattern(); } } } - setState(326); + setState(339); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(328); + setState(341); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(327); + setState(340); ((MetricsCommandContext)_localctx).aggregates = fields(); } break; } - setState(332); + setState(345); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(330); + setState(343); match(BY); - setState(331); + setState(344); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2486,13 +2605,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_evalCommand); + enterRule(_localctx, 46, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(334); + setState(347); match(EVAL); - setState(335); + setState(348); fields(); } } @@ -2541,30 +2660,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_statsCommand); + enterRule(_localctx, 48, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(337); + setState(350); match(STATS); - setState(339); + setState(352); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(338); + setState(351); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(343); + setState(356); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(341); + setState(354); match(BY); - setState(342); + setState(355); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2616,22 +2735,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_inlinestatsCommand); + enterRule(_localctx, 50, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(345); + setState(358); match(INLINESTATS); - setState(346); + setState(359); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(349); + setState(362); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(347); + setState(360); match(BY); - setState(348); + setState(361); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2683,30 +2802,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = 
new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_qualifiedName); + enterRule(_localctx, 52, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(351); + setState(364); identifier(); - setState(356); + setState(369); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(352); + setState(365); match(DOT); - setState(353); + setState(366); identifier(); } } } - setState(358); + setState(371); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } } } @@ -2755,30 +2874,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_qualifiedNamePattern); + enterRule(_localctx, 54, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(359); + setState(372); identifierPattern(); - setState(364); + setState(377); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(360); + setState(373); match(DOT); - setState(361); + setState(374); identifierPattern(); } } } - setState(366); + setState(379); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -2827,30 +2946,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternsContext qualifiedNamePatterns() throws RecognitionException { QualifiedNamePatternsContext _localctx = new QualifiedNamePatternsContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_qualifiedNamePatterns); + enterRule(_localctx, 56, RULE_qualifiedNamePatterns); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(367); + setState(380); qualifiedNamePattern(); - setState(372); + setState(385); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(368); + setState(381); match(COMMA); - setState(369); + setState(382); qualifiedNamePattern(); } } } - setState(374); + setState(387); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -2891,12 +3010,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_identifier); + enterRule(_localctx, 58, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(375); + setState(388); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2944,11 +3063,11 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = 
new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_identifierPattern); + enterRule(_localctx, 60, RULE_identifierPattern); try { enterOuterAlt(_localctx, 1); { - setState(377); + setState(390); match(ID_PATTERN); } } @@ -3216,17 +3335,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_constant); + enterRule(_localctx, 62, RULE_constant); int _la; try { - setState(421); + setState(434); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(379); + setState(392); match(NULL); } break; @@ -3234,9 +3353,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(380); + setState(393); integerValue(); - setState(381); + setState(394); match(UNQUOTED_IDENTIFIER); } break; @@ -3244,7 +3363,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(383); + setState(396); decimalValue(); } break; @@ -3252,7 +3371,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(384); + setState(397); integerValue(); } break; @@ -3260,7 +3379,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(385); + setState(398); booleanValue(); } break; @@ -3268,7 +3387,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(386); + setState(399); params(); } break; @@ -3276,7 +3395,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(387); + setState(400); string(); } break; @@ -3284,27 +3403,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(388); + setState(401); match(OPENING_BRACKET); - setState(389); + setState(402); numericValue(); - setState(394); + setState(407); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(390); + setState(403); match(COMMA); - setState(391); + setState(404); numericValue(); } } - setState(396); + setState(409); _errHandler.sync(this); _la = _input.LA(1); } - setState(397); + setState(410); match(CLOSING_BRACKET); } break; @@ -3312,27 +3431,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(399); + setState(412); match(OPENING_BRACKET); - setState(400); + setState(413); booleanValue(); - setState(405); + setState(418); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(401); + setState(414); match(COMMA); - setState(402); + setState(415); booleanValue(); } } - setState(407); + setState(420); _errHandler.sync(this); _la = _input.LA(1); } - 
setState(408); + setState(421); match(CLOSING_BRACKET); } break; @@ -3340,27 +3459,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(410); + setState(423); match(OPENING_BRACKET); - setState(411); + setState(424); string(); - setState(416); + setState(429); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(412); + setState(425); match(COMMA); - setState(413); + setState(426); string(); } } - setState(418); + setState(431); _errHandler.sync(this); _la = _input.LA(1); } - setState(419); + setState(432); match(CLOSING_BRACKET); } break; @@ -3432,16 +3551,16 @@ public T accept(ParseTreeVisitor visitor) { public final ParamsContext params() throws RecognitionException { ParamsContext _localctx = new ParamsContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_params); + enterRule(_localctx, 64, RULE_params); try { - setState(425); + setState(438); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(423); + setState(436); match(PARAM); } break; @@ -3449,7 +3568,7 @@ public final ParamsContext params() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(424); + setState(437); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3494,13 +3613,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_limitCommand); + enterRule(_localctx, 66, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(427); + setState(440); match(LIMIT); - setState(428); + setState(441); match(INTEGER_LITERAL); } } @@ -3550,32 +3669,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_sortCommand); + enterRule(_localctx, 68, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(430); + setState(443); match(SORT); - setState(431); + setState(444); orderExpression(); - setState(436); + setState(449); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(432); + setState(445); match(COMMA); - setState(433); + setState(446); orderExpression(); } } } - setState(438); + setState(451); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } } } @@ -3624,19 +3743,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_orderExpression); + enterRule(_localctx, 70, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(439); + setState(452); booleanExpression(0); - setState(441); + setState(454); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { + switch ( 
getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(440); + setState(453); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3650,14 +3769,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(445); + setState(458); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: { - setState(443); + setState(456); match(NULLS); - setState(444); + setState(457); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3712,13 +3831,13 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_keepCommand); + enterRule(_localctx, 72, RULE_keepCommand); try { enterOuterAlt(_localctx, 1); { - setState(447); + setState(460); match(KEEP); - setState(448); + setState(461); qualifiedNamePatterns(); } } @@ -3761,13 +3880,13 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_dropCommand); + enterRule(_localctx, 74, RULE_dropCommand); try { enterOuterAlt(_localctx, 1); { - setState(450); + setState(463); match(DROP); - setState(451); + setState(464); qualifiedNamePatterns(); } } @@ -3817,32 +3936,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_renameCommand); + enterRule(_localctx, 76, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(453); + setState(466); match(RENAME); - setState(454); + setState(467); renameClause(); - setState(459); + setState(472); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(455); + setState(468); match(COMMA); - setState(456); + setState(469); renameClause(); } } } - setState(461); + setState(474); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,43,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } } } @@ -3890,15 +4009,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_renameClause); + enterRule(_localctx, 78, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(462); + setState(475); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(463); + setState(476); match(AS); - setState(464); + setState(477); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3947,22 +4066,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_dissectCommand); + 
enterRule(_localctx, 80, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(466); + setState(479); match(DISSECT); - setState(467); + setState(480); primaryExpression(0); - setState(468); + setState(481); string(); - setState(470); + setState(483); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(469); + setState(482); commandOptions(); } break; @@ -4011,15 +4130,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_grokCommand); + enterRule(_localctx, 82, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(472); + setState(485); match(GROK); - setState(473); + setState(486); primaryExpression(0); - setState(474); + setState(487); string(); } } @@ -4062,13 +4181,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_mvExpandCommand); + enterRule(_localctx, 84, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(476); + setState(489); match(MV_EXPAND); - setState(477); + setState(490); qualifiedName(); } } @@ -4117,30 +4236,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_commandOptions); + enterRule(_localctx, 86, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(479); + setState(492); commandOption(); - setState(484); + setState(497); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(480); + setState(493); match(COMMA); - setState(481); + setState(494); commandOption(); } } } - setState(486); + setState(499); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,45,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } } } @@ -4186,15 +4305,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_commandOption); + enterRule(_localctx, 88, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(487); + setState(500); identifier(); - setState(488); + setState(501); match(ASSIGN); - setState(489); + setState(502); constant(); } } @@ -4235,12 +4354,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_booleanValue); + enterRule(_localctx, 90, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(491); + setState(504); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4293,22 +4412,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext 
numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_numericValue); + enterRule(_localctx, 92, RULE_numericValue); try { - setState(495); + setState(508); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(493); + setState(506); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(494); + setState(507); integerValue(); } break; @@ -4352,17 +4471,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_decimalValue); + enterRule(_localctx, 94, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(498); + setState(511); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(497); + setState(510); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4375,7 +4494,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(500); + setState(513); match(DECIMAL_LITERAL); } } @@ -4417,17 +4536,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_integerValue); + enterRule(_localctx, 96, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(503); + setState(516); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(502); + setState(515); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4440,7 +4559,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(505); + setState(518); match(INTEGER_LITERAL); } } @@ -4480,11 +4599,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_string); + enterRule(_localctx, 98, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(507); + setState(520); match(QUOTED_STRING); } } @@ -4529,12 +4648,12 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_comparisonOperator); + enterRule(_localctx, 100, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(522); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 9007199254740992000L) != 0)) ) { _errHandler.recoverInline(this); @@ -4585,13 +4704,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_explainCommand); + enterRule(_localctx, 102, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(511); + setState(524); match(EXPLAIN); - setState(512); + setState(525); subqueryExpression(); } } @@ -4635,15 
+4754,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_subqueryExpression); + enterRule(_localctx, 104, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(514); + setState(527); match(OPENING_BRACKET); - setState(515); + setState(528); query(0); - setState(516); + setState(529); match(CLOSING_BRACKET); } } @@ -4695,14 +4814,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_showCommand); + enterRule(_localctx, 106, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(518); + setState(531); match(SHOW); - setState(519); + setState(532); match(INFO); } } @@ -4754,14 +4873,14 @@ public T accept(ParseTreeVisitor visitor) { public final MetaCommandContext metaCommand() throws RecognitionException { MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_metaCommand); + enterRule(_localctx, 108, RULE_metaCommand); try { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(521); + setState(534); match(META); - setState(522); + setState(535); match(FUNCTIONS); } } @@ -4819,53 +4938,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_enrichCommand); + enterRule(_localctx, 110, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(524); + setState(537); match(ENRICH); - setState(525); + setState(538); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(528); + setState(541); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(526); + setState(539); match(ON); - setState(527); + setState(540); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(539); + setState(552); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: { - setState(530); + setState(543); match(WITH); - setState(531); + setState(544); enrichWithClause(); - setState(536); + setState(549); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,50,_ctx); + _alt = getInterpreter().adaptivePredict(_input,51,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(532); + setState(545); match(COMMA); - setState(533); + setState(546); enrichWithClause(); } } } - setState(538); + setState(551); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,50,_ctx); + _alt = getInterpreter().adaptivePredict(_input,51,_ctx); } } break; @@ -4916,23 +5035,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 108, 
RULE_enrichWithClause); + enterRule(_localctx, 112, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(544); + setState(557); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(541); + setState(554); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(542); + setState(555); match(ASSIGN); } break; } - setState(546); + setState(559); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4949,11 +5068,13 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept @SuppressWarnings("CheckReturnValue") public static class LookupCommandContext extends ParserRuleContext { - public Token tableName; + public IndexPatternContext tableName; public QualifiedNamePatternsContext matchFields; public TerminalNode LOOKUP() { return getToken(EsqlBaseParser.LOOKUP, 0); } public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); } - public TerminalNode INDEX_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.INDEX_UNQUOTED_IDENTIFIER, 0); } + public IndexPatternContext indexPattern() { + return getRuleContext(IndexPatternContext.class,0); + } public QualifiedNamePatternsContext qualifiedNamePatterns() { return getRuleContext(QualifiedNamePatternsContext.class,0); } @@ -4979,17 +5100,17 @@ public T accept(ParseTreeVisitor visitor) { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_lookupCommand); + enterRule(_localctx, 114, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(548); + setState(561); match(LOOKUP); - setState(549); - ((LookupCommandContext)_localctx).tableName = match(INDEX_UNQUOTED_IDENTIFIER); - setState(550); + setState(562); + ((LookupCommandContext)_localctx).tableName = indexPattern(); + setState(563); match(ON); - setState(551); + setState(564); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5051,7 +5172,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static final String _serializedATN = - "\u0004\u0001|\u022a\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001|\u0237\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5066,343 +5187,351 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001z\b\u0001\n\u0001"+ - "\f\u0001}\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0003\u0002\u0085\b\u0002\u0001\u0003\u0001\u0003\u0001"+ + "7\u00077\u00028\u00078\u00029\u00079\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + 
"\u0005\u0001~\b\u0001\n\u0001\f\u0001\u0081\t\u0001\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0089"+ + "\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003"+ - "\u0003\u0095\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00a1\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0005\u0005\u00a8\b\u0005\n\u0005\f\u0005\u00ab\t\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b2\b\u0005"+ - "\u0001\u0005\u0001\u0005\u0003\u0005\u00b6\b\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00be\b\u0005"+ - "\n\u0005\f\u0005\u00c1\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00c5"+ - "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00cc\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d1"+ - "\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003"+ - "\u0007\u00d8\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00de\b\b"+ - "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00e6\b\b\n\b"+ - "\f\b\u00e9\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0003\t\u00f3\b\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f8\b\t\n"+ - "\t\f\t\u00fb\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005"+ - "\n\u0103\b\n\n\n\f\n\u0106\t\n\u0003\n\u0108\b\n\u0001\n\u0001\n\u0001"+ - "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0005"+ - "\r\u0114\b\r\n\r\f\r\u0117\t\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0003\u000e\u011e\b\u000e\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0005\u000f\u0124\b\u000f\n\u000f\f\u000f\u0127\t\u000f"+ - "\u0001\u000f\u0003\u000f\u012a\b\u000f\u0001\u0010\u0001\u0010\u0001\u0011"+ - "\u0001\u0011\u0003\u0011\u0130\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0005\u0012\u0136\b\u0012\n\u0012\f\u0012\u0139\t\u0012\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0005\u0014\u0143\b\u0014\n\u0014\f\u0014\u0146\t\u0014"+ - "\u0001\u0014\u0003\u0014\u0149\b\u0014\u0001\u0014\u0001\u0014\u0003\u0014"+ - "\u014d\b\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016"+ - "\u0003\u0016\u0154\b\u0016\u0001\u0016\u0001\u0016\u0003\u0016\u0158\b"+ - "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u015e"+ - "\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0163\b\u0018"+ - "\n\u0018\f\u0018\u0166\t\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0005"+ - "\u0019\u016b\b\u0019\n\u0019\f\u0019\u016e\t\u0019\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0005\u001a\u0173\b\u001a\n\u001a\f\u001a\u0176\t\u001a\u0001"+ - "\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u0189"+ - "\b\u001d\n\u001d\f\u001d\u018c\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u0194\b\u001d\n\u001d"+ - "\f\u001d\u0197\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - 
"\u0001\u001d\u0001\u001d\u0005\u001d\u019f\b\u001d\n\u001d\f\u001d\u01a2"+ - "\t\u001d\u0001\u001d\u0001\u001d\u0003\u001d\u01a6\b\u001d\u0001\u001e"+ - "\u0001\u001e\u0003\u001e\u01aa\b\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001 \u0001 \u0001 \u0001 \u0005 \u01b3\b \n \f \u01b6\t \u0001!\u0001"+ - "!\u0003!\u01ba\b!\u0001!\u0001!\u0003!\u01be\b!\u0001\"\u0001\"\u0001"+ - "\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0005$\u01ca\b$\n"+ - "$\f$\u01cd\t$\u0001%\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&"+ - "\u0003&\u01d7\b&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001("+ - "\u0001)\u0001)\u0001)\u0005)\u01e3\b)\n)\f)\u01e6\t)\u0001*\u0001*\u0001"+ - "*\u0001*\u0001+\u0001+\u0001,\u0001,\u0003,\u01f0\b,\u0001-\u0003-\u01f3"+ - "\b-\u0001-\u0001-\u0001.\u0003.\u01f8\b.\u0001.\u0001.\u0001/\u0001/\u0001"+ - "0\u00010\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00013\u0001"+ - "3\u00013\u00014\u00014\u00014\u00015\u00015\u00015\u00015\u00035\u0211"+ - "\b5\u00015\u00015\u00015\u00015\u00055\u0217\b5\n5\f5\u021a\t5\u00035"+ - "\u021c\b5\u00016\u00016\u00016\u00036\u0221\b6\u00016\u00016\u00017\u0001"+ - "7\u00017\u00017\u00017\u00017\u0000\u0004\u0002\n\u0010\u00128\u0000\u0002"+ - "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e"+ - " \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjln\u0000\u0007\u0001\u0000?@\u0001"+ - "\u0000AC\u0001\u0000GH\u0002\u0000##\'\'\u0001\u0000*+\u0002\u0000))7"+ - "7\u0002\u000088:>\u0244\u0000p\u0001\u0000\u0000\u0000\u0002s\u0001\u0000"+ - "\u0000\u0000\u0004\u0084\u0001\u0000\u0000\u0000\u0006\u0094\u0001\u0000"+ - "\u0000\u0000\b\u0096\u0001\u0000\u0000\u0000\n\u00b5\u0001\u0000\u0000"+ - "\u0000\f\u00d0\u0001\u0000\u0000\u0000\u000e\u00d7\u0001\u0000\u0000\u0000"+ - "\u0010\u00dd\u0001\u0000\u0000\u0000\u0012\u00f2\u0001\u0000\u0000\u0000"+ - "\u0014\u00fc\u0001\u0000\u0000\u0000\u0016\u010b\u0001\u0000\u0000\u0000"+ - "\u0018\u010d\u0001\u0000\u0000\u0000\u001a\u0110\u0001\u0000\u0000\u0000"+ - "\u001c\u011d\u0001\u0000\u0000\u0000\u001e\u011f\u0001\u0000\u0000\u0000"+ - " \u012b\u0001\u0000\u0000\u0000\"\u012f\u0001\u0000\u0000\u0000$\u0131"+ - "\u0001\u0000\u0000\u0000&\u013a\u0001\u0000\u0000\u0000(\u013e\u0001\u0000"+ - "\u0000\u0000*\u014e\u0001\u0000\u0000\u0000,\u0151\u0001\u0000\u0000\u0000"+ - ".\u0159\u0001\u0000\u0000\u00000\u015f\u0001\u0000\u0000\u00002\u0167"+ - "\u0001\u0000\u0000\u00004\u016f\u0001\u0000\u0000\u00006\u0177\u0001\u0000"+ - "\u0000\u00008\u0179\u0001\u0000\u0000\u0000:\u01a5\u0001\u0000\u0000\u0000"+ - "<\u01a9\u0001\u0000\u0000\u0000>\u01ab\u0001\u0000\u0000\u0000@\u01ae"+ - "\u0001\u0000\u0000\u0000B\u01b7\u0001\u0000\u0000\u0000D\u01bf\u0001\u0000"+ - "\u0000\u0000F\u01c2\u0001\u0000\u0000\u0000H\u01c5\u0001\u0000\u0000\u0000"+ - "J\u01ce\u0001\u0000\u0000\u0000L\u01d2\u0001\u0000\u0000\u0000N\u01d8"+ - "\u0001\u0000\u0000\u0000P\u01dc\u0001\u0000\u0000\u0000R\u01df\u0001\u0000"+ - "\u0000\u0000T\u01e7\u0001\u0000\u0000\u0000V\u01eb\u0001\u0000\u0000\u0000"+ - "X\u01ef\u0001\u0000\u0000\u0000Z\u01f2\u0001\u0000\u0000\u0000\\\u01f7"+ - "\u0001\u0000\u0000\u0000^\u01fb\u0001\u0000\u0000\u0000`\u01fd\u0001\u0000"+ - "\u0000\u0000b\u01ff\u0001\u0000\u0000\u0000d\u0202\u0001\u0000\u0000\u0000"+ - "f\u0206\u0001\u0000\u0000\u0000h\u0209\u0001\u0000\u0000\u0000j\u020c"+ - "\u0001\u0000\u0000\u0000l\u0220\u0001\u0000\u0000\u0000n\u0224\u0001\u0000"+ - "\u0000\u0000pq\u0003\u0002\u0001\u0000qr\u0005\u0000\u0000\u0001r\u0001"+ - 
"\u0001\u0000\u0000\u0000st\u0006\u0001\uffff\uffff\u0000tu\u0003\u0004"+ - "\u0002\u0000u{\u0001\u0000\u0000\u0000vw\n\u0001\u0000\u0000wx\u0005\u001d"+ - "\u0000\u0000xz\u0003\u0006\u0003\u0000yv\u0001\u0000\u0000\u0000z}\u0001"+ - "\u0000\u0000\u0000{y\u0001\u0000\u0000\u0000{|\u0001\u0000\u0000\u0000"+ - "|\u0003\u0001\u0000\u0000\u0000}{\u0001\u0000\u0000\u0000~\u0085\u0003"+ - "b1\u0000\u007f\u0085\u0003\u001e\u000f\u0000\u0080\u0085\u0003\u0018\f"+ - "\u0000\u0081\u0085\u0003(\u0014\u0000\u0082\u0085\u0003f3\u0000\u0083"+ - "\u0085\u0003h4\u0000\u0084~\u0001\u0000\u0000\u0000\u0084\u007f\u0001"+ - "\u0000\u0000\u0000\u0084\u0080\u0001\u0000\u0000\u0000\u0084\u0081\u0001"+ - "\u0000\u0000\u0000\u0084\u0082\u0001\u0000\u0000\u0000\u0084\u0083\u0001"+ - "\u0000\u0000\u0000\u0085\u0005\u0001\u0000\u0000\u0000\u0086\u0095\u0003"+ - "*\u0015\u0000\u0087\u0095\u0003.\u0017\u0000\u0088\u0095\u0003>\u001f"+ - "\u0000\u0089\u0095\u0003n7\u0000\u008a\u0095\u0003D\"\u0000\u008b\u0095"+ - "\u0003@ \u0000\u008c\u0095\u0003,\u0016\u0000\u008d\u0095\u0003\b\u0004"+ - "\u0000\u008e\u0095\u0003F#\u0000\u008f\u0095\u0003H$\u0000\u0090\u0095"+ - "\u0003L&\u0000\u0091\u0095\u0003N\'\u0000\u0092\u0095\u0003j5\u0000\u0093"+ - "\u0095\u0003P(\u0000\u0094\u0086\u0001\u0000\u0000\u0000\u0094\u0087\u0001"+ - "\u0000\u0000\u0000\u0094\u0088\u0001\u0000\u0000\u0000\u0094\u0089\u0001"+ - "\u0000\u0000\u0000\u0094\u008a\u0001\u0000\u0000\u0000\u0094\u008b\u0001"+ - "\u0000\u0000\u0000\u0094\u008c\u0001\u0000\u0000\u0000\u0094\u008d\u0001"+ - "\u0000\u0000\u0000\u0094\u008e\u0001\u0000\u0000\u0000\u0094\u008f\u0001"+ - "\u0000\u0000\u0000\u0094\u0090\u0001\u0000\u0000\u0000\u0094\u0091\u0001"+ - "\u0000\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0094\u0093\u0001"+ - "\u0000\u0000\u0000\u0095\u0007\u0001\u0000\u0000\u0000\u0096\u0097\u0005"+ - "\u0014\u0000\u0000\u0097\u0098\u0003\n\u0005\u0000\u0098\t\u0001\u0000"+ - "\u0000\u0000\u0099\u009a\u0006\u0005\uffff\uffff\u0000\u009a\u009b\u0005"+ - "0\u0000\u0000\u009b\u00b6\u0003\n\u0005\u0007\u009c\u00b6\u0003\u000e"+ - "\u0007\u0000\u009d\u00b6\u0003\f\u0006\u0000\u009e\u00a0\u0003\u000e\u0007"+ - "\u0000\u009f\u00a1\u00050\u0000\u0000\u00a0\u009f\u0001\u0000\u0000\u0000"+ - "\u00a0\u00a1\u0001\u0000\u0000\u0000\u00a1\u00a2\u0001\u0000\u0000\u0000"+ - "\u00a2\u00a3\u0005-\u0000\u0000\u00a3\u00a4\u0005,\u0000\u0000\u00a4\u00a9"+ - "\u0003\u000e\u0007\u0000\u00a5\u00a6\u0005&\u0000\u0000\u00a6\u00a8\u0003"+ - "\u000e\u0007\u0000\u00a7\u00a5\u0001\u0000\u0000\u0000\u00a8\u00ab\u0001"+ - "\u0000\u0000\u0000\u00a9\u00a7\u0001\u0000\u0000\u0000\u00a9\u00aa\u0001"+ - "\u0000\u0000\u0000\u00aa\u00ac\u0001\u0000\u0000\u0000\u00ab\u00a9\u0001"+ - "\u0000\u0000\u0000\u00ac\u00ad\u00056\u0000\u0000\u00ad\u00b6\u0001\u0000"+ - "\u0000\u0000\u00ae\u00af\u0003\u000e\u0007\u0000\u00af\u00b1\u0005.\u0000"+ - "\u0000\u00b0\u00b2\u00050\u0000\u0000\u00b1\u00b0\u0001\u0000\u0000\u0000"+ - "\u00b1\u00b2\u0001\u0000\u0000\u0000\u00b2\u00b3\u0001\u0000\u0000\u0000"+ - "\u00b3\u00b4\u00051\u0000\u0000\u00b4\u00b6\u0001\u0000\u0000\u0000\u00b5"+ - "\u0099\u0001\u0000\u0000\u0000\u00b5\u009c\u0001\u0000\u0000\u0000\u00b5"+ - "\u009d\u0001\u0000\u0000\u0000\u00b5\u009e\u0001\u0000\u0000\u0000\u00b5"+ - "\u00ae\u0001\u0000\u0000\u0000\u00b6\u00bf\u0001\u0000\u0000\u0000\u00b7"+ - "\u00b8\n\u0004\u0000\u0000\u00b8\u00b9\u0005\"\u0000\u0000\u00b9\u00be"+ - "\u0003\n\u0005\u0005\u00ba\u00bb\n\u0003\u0000\u0000\u00bb\u00bc\u0005"+ - 
"3\u0000\u0000\u00bc\u00be\u0003\n\u0005\u0004\u00bd\u00b7\u0001\u0000"+ - "\u0000\u0000\u00bd\u00ba\u0001\u0000\u0000\u0000\u00be\u00c1\u0001\u0000"+ - "\u0000\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001\u0000"+ - "\u0000\u0000\u00c0\u000b\u0001\u0000\u0000\u0000\u00c1\u00bf\u0001\u0000"+ - "\u0000\u0000\u00c2\u00c4\u0003\u000e\u0007\u0000\u00c3\u00c5\u00050\u0000"+ - "\u0000\u00c4\u00c3\u0001\u0000\u0000\u0000\u00c4\u00c5\u0001\u0000\u0000"+ - "\u0000\u00c5\u00c6\u0001\u0000\u0000\u0000\u00c6\u00c7\u0005/\u0000\u0000"+ - "\u00c7\u00c8\u0003^/\u0000\u00c8\u00d1\u0001\u0000\u0000\u0000\u00c9\u00cb"+ - "\u0003\u000e\u0007\u0000\u00ca\u00cc\u00050\u0000\u0000\u00cb\u00ca\u0001"+ - "\u0000\u0000\u0000\u00cb\u00cc\u0001\u0000\u0000\u0000\u00cc\u00cd\u0001"+ - "\u0000\u0000\u0000\u00cd\u00ce\u00055\u0000\u0000\u00ce\u00cf\u0003^/"+ - "\u0000\u00cf\u00d1\u0001\u0000\u0000\u0000\u00d0\u00c2\u0001\u0000\u0000"+ - "\u0000\u00d0\u00c9\u0001\u0000\u0000\u0000\u00d1\r\u0001\u0000\u0000\u0000"+ - "\u00d2\u00d8\u0003\u0010\b\u0000\u00d3\u00d4\u0003\u0010\b\u0000\u00d4"+ - "\u00d5\u0003`0\u0000\u00d5\u00d6\u0003\u0010\b\u0000\u00d6\u00d8\u0001"+ - "\u0000\u0000\u0000\u00d7\u00d2\u0001\u0000\u0000\u0000\u00d7\u00d3\u0001"+ - "\u0000\u0000\u0000\u00d8\u000f\u0001\u0000\u0000\u0000\u00d9\u00da\u0006"+ - "\b\uffff\uffff\u0000\u00da\u00de\u0003\u0012\t\u0000\u00db\u00dc\u0007"+ - "\u0000\u0000\u0000\u00dc\u00de\u0003\u0010\b\u0003\u00dd\u00d9\u0001\u0000"+ - "\u0000\u0000\u00dd\u00db\u0001\u0000\u0000\u0000\u00de\u00e7\u0001\u0000"+ - "\u0000\u0000\u00df\u00e0\n\u0002\u0000\u0000\u00e0\u00e1\u0007\u0001\u0000"+ - "\u0000\u00e1\u00e6\u0003\u0010\b\u0003\u00e2\u00e3\n\u0001\u0000\u0000"+ - "\u00e3\u00e4\u0007\u0000\u0000\u0000\u00e4\u00e6\u0003\u0010\b\u0002\u00e5"+ - "\u00df\u0001\u0000\u0000\u0000\u00e5\u00e2\u0001\u0000\u0000\u0000\u00e6"+ - "\u00e9\u0001\u0000\u0000\u0000\u00e7\u00e5\u0001\u0000\u0000\u0000\u00e7"+ - "\u00e8\u0001\u0000\u0000\u0000\u00e8\u0011\u0001\u0000\u0000\u0000\u00e9"+ - "\u00e7\u0001\u0000\u0000\u0000\u00ea\u00eb\u0006\t\uffff\uffff\u0000\u00eb"+ - "\u00f3\u0003:\u001d\u0000\u00ec\u00f3\u00030\u0018\u0000\u00ed\u00f3\u0003"+ - "\u0014\n\u0000\u00ee\u00ef\u0005,\u0000\u0000\u00ef\u00f0\u0003\n\u0005"+ - "\u0000\u00f0\u00f1\u00056\u0000\u0000\u00f1\u00f3\u0001\u0000\u0000\u0000"+ - "\u00f2\u00ea\u0001\u0000\u0000\u0000\u00f2\u00ec\u0001\u0000\u0000\u0000"+ - "\u00f2\u00ed\u0001\u0000\u0000\u0000\u00f2\u00ee\u0001\u0000\u0000\u0000"+ - "\u00f3\u00f9\u0001\u0000\u0000\u0000\u00f4\u00f5\n\u0001\u0000\u0000\u00f5"+ - "\u00f6\u0005%\u0000\u0000\u00f6\u00f8\u0003\u0016\u000b\u0000\u00f7\u00f4"+ - "\u0001\u0000\u0000\u0000\u00f8\u00fb\u0001\u0000\u0000\u0000\u00f9\u00f7"+ - "\u0001\u0000\u0000\u0000\u00f9\u00fa\u0001\u0000\u0000\u0000\u00fa\u0013"+ - "\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u00fd"+ - "\u00036\u001b\u0000\u00fd\u0107\u0005,\u0000\u0000\u00fe\u0108\u0005A"+ - "\u0000\u0000\u00ff\u0104\u0003\n\u0005\u0000\u0100\u0101\u0005&\u0000"+ - "\u0000\u0101\u0103\u0003\n\u0005\u0000\u0102\u0100\u0001\u0000\u0000\u0000"+ - "\u0103\u0106\u0001\u0000\u0000\u0000\u0104\u0102\u0001\u0000\u0000\u0000"+ - "\u0104\u0105\u0001\u0000\u0000\u0000\u0105\u0108\u0001\u0000\u0000\u0000"+ - "\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u00fe\u0001\u0000\u0000\u0000"+ - "\u0107\u00ff\u0001\u0000\u0000\u0000\u0107\u0108\u0001\u0000\u0000\u0000"+ - "\u0108\u0109\u0001\u0000\u0000\u0000\u0109\u010a\u00056\u0000\u0000\u010a"+ - 
"\u0015\u0001\u0000\u0000\u0000\u010b\u010c\u00036\u001b\u0000\u010c\u0017"+ - "\u0001\u0000\u0000\u0000\u010d\u010e\u0005\u0010\u0000\u0000\u010e\u010f"+ - "\u0003\u001a\r\u0000\u010f\u0019\u0001\u0000\u0000\u0000\u0110\u0115\u0003"+ - "\u001c\u000e\u0000\u0111\u0112\u0005&\u0000\u0000\u0112\u0114\u0003\u001c"+ - "\u000e\u0000\u0113\u0111\u0001\u0000\u0000\u0000\u0114\u0117\u0001\u0000"+ - "\u0000\u0000\u0115\u0113\u0001\u0000\u0000\u0000\u0115\u0116\u0001\u0000"+ - "\u0000\u0000\u0116\u001b\u0001\u0000\u0000\u0000\u0117\u0115\u0001\u0000"+ - "\u0000\u0000\u0118\u011e\u0003\n\u0005\u0000\u0119\u011a\u00030\u0018"+ - "\u0000\u011a\u011b\u0005$\u0000\u0000\u011b\u011c\u0003\n\u0005\u0000"+ - "\u011c\u011e\u0001\u0000\u0000\u0000\u011d\u0118\u0001\u0000\u0000\u0000"+ - "\u011d\u0119\u0001\u0000\u0000\u0000\u011e\u001d\u0001\u0000\u0000\u0000"+ - "\u011f\u0120\u0005\u0006\u0000\u0000\u0120\u0125\u0003 \u0010\u0000\u0121"+ - "\u0122\u0005&\u0000\u0000\u0122\u0124\u0003 \u0010\u0000\u0123\u0121\u0001"+ - "\u0000\u0000\u0000\u0124\u0127\u0001\u0000\u0000\u0000\u0125\u0123\u0001"+ - "\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000\u0126\u0129\u0001"+ - "\u0000\u0000\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u012a\u0003"+ - "\"\u0011\u0000\u0129\u0128\u0001\u0000\u0000\u0000\u0129\u012a\u0001\u0000"+ - "\u0000\u0000\u012a\u001f\u0001\u0000\u0000\u0000\u012b\u012c\u0005\u0019"+ - "\u0000\u0000\u012c!\u0001\u0000\u0000\u0000\u012d\u0130\u0003$\u0012\u0000"+ - "\u012e\u0130\u0003&\u0013\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u012f"+ - "\u012e\u0001\u0000\u0000\u0000\u0130#\u0001\u0000\u0000\u0000\u0131\u0132"+ - "\u0005L\u0000\u0000\u0132\u0137\u0003 \u0010\u0000\u0133\u0134\u0005&"+ - "\u0000\u0000\u0134\u0136\u0003 \u0010\u0000\u0135\u0133\u0001\u0000\u0000"+ - "\u0000\u0136\u0139\u0001\u0000\u0000\u0000\u0137\u0135\u0001\u0000\u0000"+ - "\u0000\u0137\u0138\u0001\u0000\u0000\u0000\u0138%\u0001\u0000\u0000\u0000"+ - "\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013b\u0005E\u0000\u0000\u013b"+ - "\u013c\u0003$\u0012\u0000\u013c\u013d\u0005F\u0000\u0000\u013d\'\u0001"+ - "\u0000\u0000\u0000\u013e\u013f\u0005\r\u0000\u0000\u013f\u0144\u0003 "+ - "\u0010\u0000\u0140\u0141\u0005&\u0000\u0000\u0141\u0143\u0003 \u0010\u0000"+ - "\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000\u0000"+ - "\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000\u0000"+ - "\u0145\u0148\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000\u0000"+ - "\u0147\u0149\u0003\u001a\r\u0000\u0148\u0147\u0001\u0000\u0000\u0000\u0148"+ - "\u0149\u0001\u0000\u0000\u0000\u0149\u014c\u0001\u0000\u0000\u0000\u014a"+ - "\u014b\u0005!\u0000\u0000\u014b\u014d\u0003\u001a\r\u0000\u014c\u014a"+ - "\u0001\u0000\u0000\u0000\u014c\u014d\u0001\u0000\u0000\u0000\u014d)\u0001"+ - "\u0000\u0000\u0000\u014e\u014f\u0005\u0004\u0000\u0000\u014f\u0150\u0003"+ - "\u001a\r\u0000\u0150+\u0001\u0000\u0000\u0000\u0151\u0153\u0005\u0013"+ - "\u0000\u0000\u0152\u0154\u0003\u001a\r\u0000\u0153\u0152\u0001\u0000\u0000"+ - "\u0000\u0153\u0154\u0001\u0000\u0000\u0000\u0154\u0157\u0001\u0000\u0000"+ - "\u0000\u0155\u0156\u0005!\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000"+ - "\u0157\u0155\u0001\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000"+ - "\u0158-\u0001\u0000\u0000\u0000\u0159\u015a\u0005\b\u0000\u0000\u015a"+ - "\u015d\u0003\u001a\r\u0000\u015b\u015c\u0005!\u0000\u0000\u015c\u015e"+ - "\u0003\u001a\r\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015d\u015e\u0001"+ - 
"\u0000\u0000\u0000\u015e/\u0001\u0000\u0000\u0000\u015f\u0164\u00036\u001b"+ - "\u0000\u0160\u0161\u0005(\u0000\u0000\u0161\u0163\u00036\u001b\u0000\u0162"+ - "\u0160\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164"+ - "\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165"+ - "1\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0167\u016c"+ - "\u00038\u001c\u0000\u0168\u0169\u0005(\u0000\u0000\u0169\u016b\u00038"+ - "\u001c\u0000\u016a\u0168\u0001\u0000\u0000\u0000\u016b\u016e\u0001\u0000"+ - "\u0000\u0000\u016c\u016a\u0001\u0000\u0000\u0000\u016c\u016d\u0001\u0000"+ - "\u0000\u0000\u016d3\u0001\u0000\u0000\u0000\u016e\u016c\u0001\u0000\u0000"+ - "\u0000\u016f\u0174\u00032\u0019\u0000\u0170\u0171\u0005&\u0000\u0000\u0171"+ - "\u0173\u00032\u0019\u0000\u0172\u0170\u0001\u0000\u0000\u0000\u0173\u0176"+ - "\u0001\u0000\u0000\u0000\u0174\u0172\u0001\u0000\u0000\u0000\u0174\u0175"+ - "\u0001\u0000\u0000\u0000\u01755\u0001\u0000\u0000\u0000\u0176\u0174\u0001"+ - "\u0000\u0000\u0000\u0177\u0178\u0007\u0002\u0000\u0000\u01787\u0001\u0000"+ - "\u0000\u0000\u0179\u017a\u0005P\u0000\u0000\u017a9\u0001\u0000\u0000\u0000"+ - "\u017b\u01a6\u00051\u0000\u0000\u017c\u017d\u0003\\.\u0000\u017d\u017e"+ - "\u0005G\u0000\u0000\u017e\u01a6\u0001\u0000\u0000\u0000\u017f\u01a6\u0003"+ - "Z-\u0000\u0180\u01a6\u0003\\.\u0000\u0181\u01a6\u0003V+\u0000\u0182\u01a6"+ - "\u0003<\u001e\u0000\u0183\u01a6\u0003^/\u0000\u0184\u0185\u0005E\u0000"+ - "\u0000\u0185\u018a\u0003X,\u0000\u0186\u0187\u0005&\u0000\u0000\u0187"+ - "\u0189\u0003X,\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0189\u018c\u0001"+ - "\u0000\u0000\u0000\u018a\u0188\u0001\u0000\u0000\u0000\u018a\u018b\u0001"+ - "\u0000\u0000\u0000\u018b\u018d\u0001\u0000\u0000\u0000\u018c\u018a\u0001"+ - "\u0000\u0000\u0000\u018d\u018e\u0005F\u0000\u0000\u018e\u01a6\u0001\u0000"+ - "\u0000\u0000\u018f\u0190\u0005E\u0000\u0000\u0190\u0195\u0003V+\u0000"+ - "\u0191\u0192\u0005&\u0000\u0000\u0192\u0194\u0003V+\u0000\u0193\u0191"+ - "\u0001\u0000\u0000\u0000\u0194\u0197\u0001\u0000\u0000\u0000\u0195\u0193"+ - "\u0001\u0000\u0000\u0000\u0195\u0196\u0001\u0000\u0000\u0000\u0196\u0198"+ - "\u0001\u0000\u0000\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u0199"+ - "\u0005F\u0000\u0000\u0199\u01a6\u0001\u0000\u0000\u0000\u019a\u019b\u0005"+ - "E\u0000\u0000\u019b\u01a0\u0003^/\u0000\u019c\u019d\u0005&\u0000\u0000"+ - "\u019d\u019f\u0003^/\u0000\u019e\u019c\u0001\u0000\u0000\u0000\u019f\u01a2"+ - "\u0001\u0000\u0000\u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a0\u01a1"+ - "\u0001\u0000\u0000\u0000\u01a1\u01a3\u0001\u0000\u0000\u0000\u01a2\u01a0"+ - "\u0001\u0000\u0000\u0000\u01a3\u01a4\u0005F\u0000\u0000\u01a4\u01a6\u0001"+ - "\u0000\u0000\u0000\u01a5\u017b\u0001\u0000\u0000\u0000\u01a5\u017c\u0001"+ - "\u0000\u0000\u0000\u01a5\u017f\u0001\u0000\u0000\u0000\u01a5\u0180\u0001"+ - "\u0000\u0000\u0000\u01a5\u0181\u0001\u0000\u0000\u0000\u01a5\u0182\u0001"+ - "\u0000\u0000\u0000\u01a5\u0183\u0001\u0000\u0000\u0000\u01a5\u0184\u0001"+ - "\u0000\u0000\u0000\u01a5\u018f\u0001\u0000\u0000\u0000\u01a5\u019a\u0001"+ - "\u0000\u0000\u0000\u01a6;\u0001\u0000\u0000\u0000\u01a7\u01aa\u00054\u0000"+ - "\u0000\u01a8\u01aa\u0005D\u0000\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000"+ - "\u01a9\u01a8\u0001\u0000\u0000\u0000\u01aa=\u0001\u0000\u0000\u0000\u01ab"+ - "\u01ac\u0005\n\u0000\u0000\u01ac\u01ad\u0005\u001f\u0000\u0000\u01ad?"+ - "\u0001\u0000\u0000\u0000\u01ae\u01af\u0005\u0012\u0000\u0000\u01af\u01b4"+ - 
"\u0003B!\u0000\u01b0\u01b1\u0005&\u0000\u0000\u01b1\u01b3\u0003B!\u0000"+ - "\u01b2\u01b0\u0001\u0000\u0000\u0000\u01b3\u01b6\u0001\u0000\u0000\u0000"+ - "\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000\u0000\u0000"+ - "\u01b5A\u0001\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000\u0000\u01b7"+ - "\u01b9\u0003\n\u0005\u0000\u01b8\u01ba\u0007\u0003\u0000\u0000\u01b9\u01b8"+ - "\u0001\u0000\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bd"+ - "\u0001\u0000\u0000\u0000\u01bb\u01bc\u00052\u0000\u0000\u01bc\u01be\u0007"+ - "\u0004\u0000\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01bd\u01be\u0001"+ - "\u0000\u0000\u0000\u01beC\u0001\u0000\u0000\u0000\u01bf\u01c0\u0005\t"+ - "\u0000\u0000\u01c0\u01c1\u00034\u001a\u0000\u01c1E\u0001\u0000\u0000\u0000"+ - "\u01c2\u01c3\u0005\u0002\u0000\u0000\u01c3\u01c4\u00034\u001a\u0000\u01c4"+ - "G\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005\u000f\u0000\u0000\u01c6\u01cb"+ - "\u0003J%\u0000\u01c7\u01c8\u0005&\u0000\u0000\u01c8\u01ca\u0003J%\u0000"+ - "\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cd\u0001\u0000\u0000\u0000"+ - "\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000"+ - "\u01ccI\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce"+ - "\u01cf\u00032\u0019\u0000\u01cf\u01d0\u0005T\u0000\u0000\u01d0\u01d1\u0003"+ - "2\u0019\u0000\u01d1K\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u0001\u0000"+ - "\u0000\u01d3\u01d4\u0003\u0012\t\u0000\u01d4\u01d6\u0003^/\u0000\u01d5"+ - "\u01d7\u0003R)\u0000\u01d6\u01d5\u0001\u0000\u0000\u0000\u01d6\u01d7\u0001"+ - "\u0000\u0000\u0000\u01d7M\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005\u0007"+ - "\u0000\u0000\u01d9\u01da\u0003\u0012\t\u0000\u01da\u01db\u0003^/\u0000"+ - "\u01dbO\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000\u01dd"+ - "\u01de\u00030\u0018\u0000\u01deQ\u0001\u0000\u0000\u0000\u01df\u01e4\u0003"+ - "T*\u0000\u01e0\u01e1\u0005&\u0000\u0000\u01e1\u01e3\u0003T*\u0000\u01e2"+ - "\u01e0\u0001\u0000\u0000\u0000\u01e3\u01e6\u0001\u0000\u0000\u0000\u01e4"+ - "\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5"+ - "S\u0001\u0000\u0000\u0000\u01e6\u01e4\u0001\u0000\u0000\u0000\u01e7\u01e8"+ - "\u00036\u001b\u0000\u01e8\u01e9\u0005$\u0000\u0000\u01e9\u01ea\u0003:"+ - "\u001d\u0000\u01eaU\u0001\u0000\u0000\u0000\u01eb\u01ec\u0007\u0005\u0000"+ - "\u0000\u01ecW\u0001\u0000\u0000\u0000\u01ed\u01f0\u0003Z-\u0000\u01ee"+ - "\u01f0\u0003\\.\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01ef\u01ee"+ - "\u0001\u0000\u0000\u0000\u01f0Y\u0001\u0000\u0000\u0000\u01f1\u01f3\u0007"+ - "\u0000\u0000\u0000\u01f2\u01f1\u0001\u0000\u0000\u0000\u01f2\u01f3\u0001"+ - "\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005"+ - " \u0000\u0000\u01f5[\u0001\u0000\u0000\u0000\u01f6\u01f8\u0007\u0000\u0000"+ - "\u0000\u01f7\u01f6\u0001\u0000\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000"+ - "\u0000\u01f8\u01f9\u0001\u0000\u0000\u0000\u01f9\u01fa\u0005\u001f\u0000"+ - "\u0000\u01fa]\u0001\u0000\u0000\u0000\u01fb\u01fc\u0005\u001e\u0000\u0000"+ - "\u01fc_\u0001\u0000\u0000\u0000\u01fd\u01fe\u0007\u0006\u0000\u0000\u01fe"+ - "a\u0001\u0000\u0000\u0000\u01ff\u0200\u0005\u0005\u0000\u0000\u0200\u0201"+ - "\u0003d2\u0000\u0201c\u0001\u0000\u0000\u0000\u0202\u0203\u0005E\u0000"+ - "\u0000\u0203\u0204\u0003\u0002\u0001\u0000\u0204\u0205\u0005F\u0000\u0000"+ - "\u0205e\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u0011\u0000\u0000\u0207"+ - "\u0208\u0005j\u0000\u0000\u0208g\u0001\u0000\u0000\u0000\u0209\u020a\u0005"+ - 
"\f\u0000\u0000\u020a\u020b\u0005n\u0000\u0000\u020bi\u0001\u0000\u0000"+ - "\u0000\u020c\u020d\u0005\u0003\u0000\u0000\u020d\u0210\u0005Z\u0000\u0000"+ - "\u020e\u020f\u0005X\u0000\u0000\u020f\u0211\u00032\u0019\u0000\u0210\u020e"+ - "\u0001\u0000\u0000\u0000\u0210\u0211\u0001\u0000\u0000\u0000\u0211\u021b"+ - "\u0001\u0000\u0000\u0000\u0212\u0213\u0005Y\u0000\u0000\u0213\u0218\u0003"+ - "l6\u0000\u0214\u0215\u0005&\u0000\u0000\u0215\u0217\u0003l6\u0000\u0216"+ - "\u0214\u0001\u0000\u0000\u0000\u0217\u021a\u0001\u0000\u0000\u0000\u0218"+ - "\u0216\u0001\u0000\u0000\u0000\u0218\u0219\u0001\u0000\u0000\u0000\u0219"+ - "\u021c\u0001\u0000\u0000\u0000\u021a\u0218\u0001\u0000\u0000\u0000\u021b"+ - "\u0212\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ - "k\u0001\u0000\u0000\u0000\u021d\u021e\u00032\u0019\u0000\u021e\u021f\u0005"+ - "$\u0000\u0000\u021f\u0221\u0001\u0000\u0000\u0000\u0220\u021d\u0001\u0000"+ - "\u0000\u0000\u0220\u0221\u0001\u0000\u0000\u0000\u0221\u0222\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u00032\u0019\u0000\u0223m\u0001\u0000\u0000\u0000"+ - "\u0224\u0225\u0005\u000b\u0000\u0000\u0225\u0226\u0005\u0019\u0000\u0000"+ - "\u0226\u0227\u0005X\u0000\u0000\u0227\u0228\u00034\u001a\u0000\u0228o"+ - "\u0001\u0000\u0000\u00005{\u0084\u0094\u00a0\u00a9\u00b1\u00b5\u00bd\u00bf"+ - "\u00c4\u00cb\u00d0\u00d7\u00dd\u00e5\u00e7\u00f2\u00f9\u0104\u0107\u0115"+ - "\u011d\u0125\u0129\u012f\u0137\u0144\u0148\u014c\u0153\u0157\u015d\u0164"+ - "\u016c\u0174\u018a\u0195\u01a0\u01a5\u01a9\u01b4\u01b9\u01bd\u01cb\u01d6"+ - "\u01e4\u01ef\u01f2\u01f7\u0210\u0218\u021b\u0220"; + "\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0099\b\u0003\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a5\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00ac\b\u0005\n"+ + "\u0005\f\u0005\u00af\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0003\u0005\u00b6\b\u0005\u0001\u0005\u0001\u0005\u0003"+ + "\u0005\u00ba\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0005\u0005\u00c2\b\u0005\n\u0005\f\u0005\u00c5\t\u0005"+ + "\u0001\u0006\u0001\u0006\u0003\u0006\u00c9\b\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d0\b\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0003\u0006\u00d5\b\u0006\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00dc\b\u0007\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0003\b\u00e2\b\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0005\b\u00ea\b\b\n\b\f\b\u00ed\t\b\u0001\t\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00f7\b\t\u0001"+ + "\t\u0001\t\u0001\t\u0005\t\u00fc\b\t\n\t\f\t\u00ff\t\t\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u0107\b\n\n\n\f\n\u010a\t\n\u0003"+ + "\n\u010c\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ + "\f\u0001\r\u0001\r\u0001\r\u0005\r\u0118\b\r\n\r\f\r\u011b\t\r\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0122\b\u000e"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0128\b\u000f"+ + "\n\u000f\f\u000f\u012b\t\u000f\u0001\u000f\u0003\u000f\u012e\b\u000f\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0135"+ + "\b\u0010\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ + 
"\u0013\u0003\u0013\u013d\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0005\u0014\u0143\b\u0014\n\u0014\f\u0014\u0146\t\u0014\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0016\u0005\u0016\u0150\b\u0016\n\u0016\f\u0016\u0153\t\u0016\u0001"+ + "\u0016\u0003\u0016\u0156\b\u0016\u0001\u0016\u0001\u0016\u0003\u0016\u015a"+ + "\b\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0003"+ + "\u0018\u0161\b\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0165\b\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u016b\b\u0019"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0170\b\u001a\n\u001a"+ + "\f\u001a\u0173\t\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ + "\u0178\b\u001b\n\u001b\f\u001b\u017b\t\u001b\u0001\u001c\u0001\u001c\u0001"+ + "\u001c\u0005\u001c\u0180\b\u001c\n\u001c\f\u001c\u0183\t\u001c\u0001\u001d"+ + "\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u0196\b\u001f"+ + "\n\u001f\f\u001f\u0199\t\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a1\b\u001f\n\u001f\f\u001f"+ + "\u01a4\t\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001\u001f\u0005\u001f\u01ac\b\u001f\n\u001f\f\u001f\u01af\t\u001f\u0001"+ + "\u001f\u0001\u001f\u0003\u001f\u01b3\b\u001f\u0001 \u0001 \u0003 \u01b7"+ + "\b \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c0"+ + "\b\"\n\"\f\"\u01c3\t\"\u0001#\u0001#\u0003#\u01c7\b#\u0001#\u0001#\u0003"+ + "#\u01cb\b#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001"+ + "&\u0001&\u0005&\u01d7\b&\n&\f&\u01da\t&\u0001\'\u0001\'\u0001\'\u0001"+ + "\'\u0001(\u0001(\u0001(\u0001(\u0003(\u01e4\b(\u0001)\u0001)\u0001)\u0001"+ + ")\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0005+\u01f0\b+\n+\f+\u01f3"+ + "\t+\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0003.\u01fd"+ + "\b.\u0001/\u0003/\u0200\b/\u0001/\u0001/\u00010\u00030\u0205\b0\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00014\u0001"+ + "4\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u0001"+ + "7\u00017\u00017\u00037\u021e\b7\u00017\u00017\u00017\u00017\u00057\u0224"+ + "\b7\n7\f7\u0227\t7\u00037\u0229\b7\u00018\u00018\u00018\u00038\u022e\b"+ + "8\u00018\u00018\u00019\u00019\u00019\u00019\u00019\u00019\u0000\u0004"+ + "\u0002\n\u0010\u0012:\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012"+ + "\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\"+ + "^`bdfhjlnpr\u0000\b\u0001\u0000?@\u0001\u0000AC\u0002\u0000\u0019\u0019"+ + "\u001e\u001e\u0001\u0000GH\u0002\u0000##\'\'\u0001\u0000*+\u0002\u0000"+ + "))77\u0002\u000088:>\u0250\u0000t\u0001\u0000\u0000\u0000\u0002w\u0001"+ + "\u0000\u0000\u0000\u0004\u0088\u0001\u0000\u0000\u0000\u0006\u0098\u0001"+ + "\u0000\u0000\u0000\b\u009a\u0001\u0000\u0000\u0000\n\u00b9\u0001\u0000"+ + "\u0000\u0000\f\u00d4\u0001\u0000\u0000\u0000\u000e\u00db\u0001\u0000\u0000"+ + "\u0000\u0010\u00e1\u0001\u0000\u0000\u0000\u0012\u00f6\u0001\u0000\u0000"+ + "\u0000\u0014\u0100\u0001\u0000\u0000\u0000\u0016\u010f\u0001\u0000\u0000"+ + "\u0000\u0018\u0111\u0001\u0000\u0000\u0000\u001a\u0114\u0001\u0000\u0000"+ + "\u0000\u001c\u0121\u0001\u0000\u0000\u0000\u001e\u0123\u0001\u0000\u0000"+ + "\u0000 
\u0134\u0001\u0000\u0000\u0000\"\u0136\u0001\u0000\u0000\u0000"+ + "$\u0138\u0001\u0000\u0000\u0000&\u013c\u0001\u0000\u0000\u0000(\u013e"+ + "\u0001\u0000\u0000\u0000*\u0147\u0001\u0000\u0000\u0000,\u014b\u0001\u0000"+ + "\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015e\u0001\u0000\u0000\u0000"+ + "2\u0166\u0001\u0000\u0000\u00004\u016c\u0001\u0000\u0000\u00006\u0174"+ + "\u0001\u0000\u0000\u00008\u017c\u0001\u0000\u0000\u0000:\u0184\u0001\u0000"+ + "\u0000\u0000<\u0186\u0001\u0000\u0000\u0000>\u01b2\u0001\u0000\u0000\u0000"+ + "@\u01b6\u0001\u0000\u0000\u0000B\u01b8\u0001\u0000\u0000\u0000D\u01bb"+ + "\u0001\u0000\u0000\u0000F\u01c4\u0001\u0000\u0000\u0000H\u01cc\u0001\u0000"+ + "\u0000\u0000J\u01cf\u0001\u0000\u0000\u0000L\u01d2\u0001\u0000\u0000\u0000"+ + "N\u01db\u0001\u0000\u0000\u0000P\u01df\u0001\u0000\u0000\u0000R\u01e5"+ + "\u0001\u0000\u0000\u0000T\u01e9\u0001\u0000\u0000\u0000V\u01ec\u0001\u0000"+ + "\u0000\u0000X\u01f4\u0001\u0000\u0000\u0000Z\u01f8\u0001\u0000\u0000\u0000"+ + "\\\u01fc\u0001\u0000\u0000\u0000^\u01ff\u0001\u0000\u0000\u0000`\u0204"+ + "\u0001\u0000\u0000\u0000b\u0208\u0001\u0000\u0000\u0000d\u020a\u0001\u0000"+ + "\u0000\u0000f\u020c\u0001\u0000\u0000\u0000h\u020f\u0001\u0000\u0000\u0000"+ + "j\u0213\u0001\u0000\u0000\u0000l\u0216\u0001\u0000\u0000\u0000n\u0219"+ + "\u0001\u0000\u0000\u0000p\u022d\u0001\u0000\u0000\u0000r\u0231\u0001\u0000"+ + "\u0000\u0000tu\u0003\u0002\u0001\u0000uv\u0005\u0000\u0000\u0001v\u0001"+ + "\u0001\u0000\u0000\u0000wx\u0006\u0001\uffff\uffff\u0000xy\u0003\u0004"+ + "\u0002\u0000y\u007f\u0001\u0000\u0000\u0000z{\n\u0001\u0000\u0000{|\u0005"+ + "\u001d\u0000\u0000|~\u0003\u0006\u0003\u0000}z\u0001\u0000\u0000\u0000"+ + "~\u0081\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f\u0080"+ + "\u0001\u0000\u0000\u0000\u0080\u0003\u0001\u0000\u0000\u0000\u0081\u007f"+ + "\u0001\u0000\u0000\u0000\u0082\u0089\u0003f3\u0000\u0083\u0089\u0003\u001e"+ + "\u000f\u0000\u0084\u0089\u0003\u0018\f\u0000\u0085\u0089\u0003,\u0016"+ + "\u0000\u0086\u0089\u0003j5\u0000\u0087\u0089\u0003l6\u0000\u0088\u0082"+ + "\u0001\u0000\u0000\u0000\u0088\u0083\u0001\u0000\u0000\u0000\u0088\u0084"+ + "\u0001\u0000\u0000\u0000\u0088\u0085\u0001\u0000\u0000\u0000\u0088\u0086"+ + "\u0001\u0000\u0000\u0000\u0088\u0087\u0001\u0000\u0000\u0000\u0089\u0005"+ + "\u0001\u0000\u0000\u0000\u008a\u0099\u0003.\u0017\u0000\u008b\u0099\u0003"+ + "2\u0019\u0000\u008c\u0099\u0003B!\u0000\u008d\u0099\u0003r9\u0000\u008e"+ + "\u0099\u0003H$\u0000\u008f\u0099\u0003D\"\u0000\u0090\u0099\u00030\u0018"+ + "\u0000\u0091\u0099\u0003\b\u0004\u0000\u0092\u0099\u0003J%\u0000\u0093"+ + "\u0099\u0003L&\u0000\u0094\u0099\u0003P(\u0000\u0095\u0099\u0003R)\u0000"+ + "\u0096\u0099\u0003n7\u0000\u0097\u0099\u0003T*\u0000\u0098\u008a\u0001"+ + "\u0000\u0000\u0000\u0098\u008b\u0001\u0000\u0000\u0000\u0098\u008c\u0001"+ + "\u0000\u0000\u0000\u0098\u008d\u0001\u0000\u0000\u0000\u0098\u008e\u0001"+ + "\u0000\u0000\u0000\u0098\u008f\u0001\u0000\u0000\u0000\u0098\u0090\u0001"+ + "\u0000\u0000\u0000\u0098\u0091\u0001\u0000\u0000\u0000\u0098\u0092\u0001"+ + "\u0000\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001"+ + "\u0000\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001"+ + "\u0000\u0000\u0000\u0098\u0097\u0001\u0000\u0000\u0000\u0099\u0007\u0001"+ + "\u0000\u0000\u0000\u009a\u009b\u0005\u0014\u0000\u0000\u009b\u009c\u0003"+ + "\n\u0005\u0000\u009c\t\u0001\u0000\u0000\u0000\u009d\u009e\u0006\u0005"+ + 
"\uffff\uffff\u0000\u009e\u009f\u00050\u0000\u0000\u009f\u00ba\u0003\n"+ + "\u0005\u0007\u00a0\u00ba\u0003\u000e\u0007\u0000\u00a1\u00ba\u0003\f\u0006"+ + "\u0000\u00a2\u00a4\u0003\u000e\u0007\u0000\u00a3\u00a5\u00050\u0000\u0000"+ + "\u00a4\u00a3\u0001\u0000\u0000\u0000\u00a4\u00a5\u0001\u0000\u0000\u0000"+ + "\u00a5\u00a6\u0001\u0000\u0000\u0000\u00a6\u00a7\u0005-\u0000\u0000\u00a7"+ + "\u00a8\u0005,\u0000\u0000\u00a8\u00ad\u0003\u000e\u0007\u0000\u00a9\u00aa"+ + "\u0005&\u0000\u0000\u00aa\u00ac\u0003\u000e\u0007\u0000\u00ab\u00a9\u0001"+ + "\u0000\u0000\u0000\u00ac\u00af\u0001\u0000\u0000\u0000\u00ad\u00ab\u0001"+ + "\u0000\u0000\u0000\u00ad\u00ae\u0001\u0000\u0000\u0000\u00ae\u00b0\u0001"+ + "\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00b0\u00b1\u0005"+ + "6\u0000\u0000\u00b1\u00ba\u0001\u0000\u0000\u0000\u00b2\u00b3\u0003\u000e"+ + "\u0007\u0000\u00b3\u00b5\u0005.\u0000\u0000\u00b4\u00b6\u00050\u0000\u0000"+ + "\u00b5\u00b4\u0001\u0000\u0000\u0000\u00b5\u00b6\u0001\u0000\u0000\u0000"+ + "\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8\u00051\u0000\u0000\u00b8"+ + "\u00ba\u0001\u0000\u0000\u0000\u00b9\u009d\u0001\u0000\u0000\u0000\u00b9"+ + "\u00a0\u0001\u0000\u0000\u0000\u00b9\u00a1\u0001\u0000\u0000\u0000\u00b9"+ + "\u00a2\u0001\u0000\u0000\u0000\u00b9\u00b2\u0001\u0000\u0000\u0000\u00ba"+ + "\u00c3\u0001\u0000\u0000\u0000\u00bb\u00bc\n\u0004\u0000\u0000\u00bc\u00bd"+ + "\u0005\"\u0000\u0000\u00bd\u00c2\u0003\n\u0005\u0005\u00be\u00bf\n\u0003"+ + "\u0000\u0000\u00bf\u00c0\u00053\u0000\u0000\u00c0\u00c2\u0003\n\u0005"+ + "\u0004\u00c1\u00bb\u0001\u0000\u0000\u0000\u00c1\u00be\u0001\u0000\u0000"+ + "\u0000\u00c2\u00c5\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000"+ + "\u0000\u00c3\u00c4\u0001\u0000\u0000\u0000\u00c4\u000b\u0001\u0000\u0000"+ + "\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000\u00c6\u00c8\u0003\u000e\u0007"+ + "\u0000\u00c7\u00c9\u00050\u0000\u0000\u00c8\u00c7\u0001\u0000\u0000\u0000"+ + "\u00c8\u00c9\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001\u0000\u0000\u0000"+ + "\u00ca\u00cb\u0005/\u0000\u0000\u00cb\u00cc\u0003b1\u0000\u00cc\u00d5"+ + "\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e\u0007\u0000\u00ce\u00d0"+ + "\u00050\u0000\u0000\u00cf\u00ce\u0001\u0000\u0000\u0000\u00cf\u00d0\u0001"+ + "\u0000\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1\u00d2\u0005"+ + "5\u0000\u0000\u00d2\u00d3\u0003b1\u0000\u00d3\u00d5\u0001\u0000\u0000"+ + "\u0000\u00d4\u00c6\u0001\u0000\u0000\u0000\u00d4\u00cd\u0001\u0000\u0000"+ + "\u0000\u00d5\r\u0001\u0000\u0000\u0000\u00d6\u00dc\u0003\u0010\b\u0000"+ + "\u00d7\u00d8\u0003\u0010\b\u0000\u00d8\u00d9\u0003d2\u0000\u00d9\u00da"+ + "\u0003\u0010\b\u0000\u00da\u00dc\u0001\u0000\u0000\u0000\u00db\u00d6\u0001"+ + "\u0000\u0000\u0000\u00db\u00d7\u0001\u0000\u0000\u0000\u00dc\u000f\u0001"+ + "\u0000\u0000\u0000\u00dd\u00de\u0006\b\uffff\uffff\u0000\u00de\u00e2\u0003"+ + "\u0012\t\u0000\u00df\u00e0\u0007\u0000\u0000\u0000\u00e0\u00e2\u0003\u0010"+ + "\b\u0003\u00e1\u00dd\u0001\u0000\u0000\u0000\u00e1\u00df\u0001\u0000\u0000"+ + "\u0000\u00e2\u00eb\u0001\u0000\u0000\u0000\u00e3\u00e4\n\u0002\u0000\u0000"+ + "\u00e4\u00e5\u0007\u0001\u0000\u0000\u00e5\u00ea\u0003\u0010\b\u0003\u00e6"+ + "\u00e7\n\u0001\u0000\u0000\u00e7\u00e8\u0007\u0000\u0000\u0000\u00e8\u00ea"+ + "\u0003\u0010\b\u0002\u00e9\u00e3\u0001\u0000\u0000\u0000\u00e9\u00e6\u0001"+ + "\u0000\u0000\u0000\u00ea\u00ed\u0001\u0000\u0000\u0000\u00eb\u00e9\u0001"+ + "\u0000\u0000\u0000\u00eb\u00ec\u0001\u0000\u0000\u0000\u00ec\u0011\u0001"+ + 
"\u0000\u0000\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00ef\u0006"+ + "\t\uffff\uffff\u0000\u00ef\u00f7\u0003>\u001f\u0000\u00f0\u00f7\u0003"+ + "4\u001a\u0000\u00f1\u00f7\u0003\u0014\n\u0000\u00f2\u00f3\u0005,\u0000"+ + "\u0000\u00f3\u00f4\u0003\n\u0005\u0000\u00f4\u00f5\u00056\u0000\u0000"+ + "\u00f5\u00f7\u0001\u0000\u0000\u0000\u00f6\u00ee\u0001\u0000\u0000\u0000"+ + "\u00f6\u00f0\u0001\u0000\u0000\u0000\u00f6\u00f1\u0001\u0000\u0000\u0000"+ + "\u00f6\u00f2\u0001\u0000\u0000\u0000\u00f7\u00fd\u0001\u0000\u0000\u0000"+ + "\u00f8\u00f9\n\u0001\u0000\u0000\u00f9\u00fa\u0005%\u0000\u0000\u00fa"+ + "\u00fc\u0003\u0016\u000b\u0000\u00fb\u00f8\u0001\u0000\u0000\u0000\u00fc"+ + "\u00ff\u0001\u0000\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fd"+ + "\u00fe\u0001\u0000\u0000\u0000\u00fe\u0013\u0001\u0000\u0000\u0000\u00ff"+ + "\u00fd\u0001\u0000\u0000\u0000\u0100\u0101\u0003:\u001d\u0000\u0101\u010b"+ + "\u0005,\u0000\u0000\u0102\u010c\u0005A\u0000\u0000\u0103\u0108\u0003\n"+ + "\u0005\u0000\u0104\u0105\u0005&\u0000\u0000\u0105\u0107\u0003\n\u0005"+ + "\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u010a\u0001\u0000\u0000"+ + "\u0000\u0108\u0106\u0001\u0000\u0000\u0000\u0108\u0109\u0001\u0000\u0000"+ + "\u0000\u0109\u010c\u0001\u0000\u0000\u0000\u010a\u0108\u0001\u0000\u0000"+ + "\u0000\u010b\u0102\u0001\u0000\u0000\u0000\u010b\u0103\u0001\u0000\u0000"+ + "\u0000\u010b\u010c\u0001\u0000\u0000\u0000\u010c\u010d\u0001\u0000\u0000"+ + "\u0000\u010d\u010e\u00056\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000"+ + "\u010f\u0110\u0003:\u001d\u0000\u0110\u0017\u0001\u0000\u0000\u0000\u0111"+ + "\u0112\u0005\u0010\u0000\u0000\u0112\u0113\u0003\u001a\r\u0000\u0113\u0019"+ + "\u0001\u0000\u0000\u0000\u0114\u0119\u0003\u001c\u000e\u0000\u0115\u0116"+ + "\u0005&\u0000\u0000\u0116\u0118\u0003\u001c\u000e\u0000\u0117\u0115\u0001"+ + "\u0000\u0000\u0000\u0118\u011b\u0001\u0000\u0000\u0000\u0119\u0117\u0001"+ + "\u0000\u0000\u0000\u0119\u011a\u0001\u0000\u0000\u0000\u011a\u001b\u0001"+ + "\u0000\u0000\u0000\u011b\u0119\u0001\u0000\u0000\u0000\u011c\u0122\u0003"+ + "\n\u0005\u0000\u011d\u011e\u00034\u001a\u0000\u011e\u011f\u0005$\u0000"+ + "\u0000\u011f\u0120\u0003\n\u0005\u0000\u0120\u0122\u0001\u0000\u0000\u0000"+ + "\u0121\u011c\u0001\u0000\u0000\u0000\u0121\u011d\u0001\u0000\u0000\u0000"+ + "\u0122\u001d\u0001\u0000\u0000\u0000\u0123\u0124\u0005\u0006\u0000\u0000"+ + "\u0124\u0129\u0003 \u0010\u0000\u0125\u0126\u0005&\u0000\u0000\u0126\u0128"+ + "\u0003 \u0010\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u012b\u0001"+ + "\u0000\u0000\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u0129\u012a\u0001"+ + "\u0000\u0000\u0000\u012a\u012d\u0001\u0000\u0000\u0000\u012b\u0129\u0001"+ + "\u0000\u0000\u0000\u012c\u012e\u0003&\u0013\u0000\u012d\u012c\u0001\u0000"+ + "\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\u001f\u0001\u0000"+ + "\u0000\u0000\u012f\u0130\u0003\"\u0011\u0000\u0130\u0131\u0005r\u0000"+ + "\u0000\u0131\u0132\u0003$\u0012\u0000\u0132\u0135\u0001\u0000\u0000\u0000"+ + "\u0133\u0135\u0003$\u0012\u0000\u0134\u012f\u0001\u0000\u0000\u0000\u0134"+ + "\u0133\u0001\u0000\u0000\u0000\u0135!\u0001\u0000\u0000\u0000\u0136\u0137"+ + "\u0005\u0019\u0000\u0000\u0137#\u0001\u0000\u0000\u0000\u0138\u0139\u0007"+ + "\u0002\u0000\u0000\u0139%\u0001\u0000\u0000\u0000\u013a\u013d\u0003(\u0014"+ + "\u0000\u013b\u013d\u0003*\u0015\u0000\u013c\u013a\u0001\u0000\u0000\u0000"+ + "\u013c\u013b\u0001\u0000\u0000\u0000\u013d\'\u0001\u0000\u0000\u0000\u013e"+ + 
"\u013f\u0005L\u0000\u0000\u013f\u0144\u0005\u0019\u0000\u0000\u0140\u0141"+ + "\u0005&\u0000\u0000\u0141\u0143\u0005\u0019\u0000\u0000\u0142\u0140\u0001"+ + "\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000\u0000\u0144\u0142\u0001"+ + "\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000\u0000\u0145)\u0001\u0000"+ + "\u0000\u0000\u0146\u0144\u0001\u0000\u0000\u0000\u0147\u0148\u0005E\u0000"+ + "\u0000\u0148\u0149\u0003(\u0014\u0000\u0149\u014a\u0005F\u0000\u0000\u014a"+ + "+\u0001\u0000\u0000\u0000\u014b\u014c\u0005\r\u0000\u0000\u014c\u0151"+ + "\u0003 \u0010\u0000\u014d\u014e\u0005&\u0000\u0000\u014e\u0150\u0003 "+ + "\u0010\u0000\u014f\u014d\u0001\u0000\u0000\u0000\u0150\u0153\u0001\u0000"+ + "\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000"+ + "\u0000\u0000\u0152\u0155\u0001\u0000\u0000\u0000\u0153\u0151\u0001\u0000"+ + "\u0000\u0000\u0154\u0156\u0003\u001a\r\u0000\u0155\u0154\u0001\u0000\u0000"+ + "\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156\u0159\u0001\u0000\u0000"+ + "\u0000\u0157\u0158\u0005!\u0000\u0000\u0158\u015a\u0003\u001a\r\u0000"+ + "\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a\u0001\u0000\u0000\u0000"+ + "\u015a-\u0001\u0000\u0000\u0000\u015b\u015c\u0005\u0004\u0000\u0000\u015c"+ + "\u015d\u0003\u001a\r\u0000\u015d/\u0001\u0000\u0000\u0000\u015e\u0160"+ + "\u0005\u0013\u0000\u0000\u015f\u0161\u0003\u001a\r\u0000\u0160\u015f\u0001"+ + "\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000\u0161\u0164\u0001"+ + "\u0000\u0000\u0000\u0162\u0163\u0005!\u0000\u0000\u0163\u0165\u0003\u001a"+ + "\r\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000"+ + "\u0000\u01651\u0001\u0000\u0000\u0000\u0166\u0167\u0005\b\u0000\u0000"+ + "\u0167\u016a\u0003\u001a\r\u0000\u0168\u0169\u0005!\u0000\u0000\u0169"+ + "\u016b\u0003\u001a\r\u0000\u016a\u0168\u0001\u0000\u0000\u0000\u016a\u016b"+ + "\u0001\u0000\u0000\u0000\u016b3\u0001\u0000\u0000\u0000\u016c\u0171\u0003"+ + ":\u001d\u0000\u016d\u016e\u0005(\u0000\u0000\u016e\u0170\u0003:\u001d"+ + "\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001\u0000\u0000"+ + "\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ + "\u0000\u01725\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000\u0000\u0000"+ + "\u0174\u0179\u0003<\u001e\u0000\u0175\u0176\u0005(\u0000\u0000\u0176\u0178"+ + "\u0003<\u001e\u0000\u0177\u0175\u0001\u0000\u0000\u0000\u0178\u017b\u0001"+ + "\u0000\u0000\u0000\u0179\u0177\u0001\u0000\u0000\u0000\u0179\u017a\u0001"+ + "\u0000\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b\u0179\u0001\u0000"+ + "\u0000\u0000\u017c\u0181\u00036\u001b\u0000\u017d\u017e\u0005&\u0000\u0000"+ + "\u017e\u0180\u00036\u001b\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180"+ + "\u0183\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0181"+ + "\u0182\u0001\u0000\u0000\u0000\u01829\u0001\u0000\u0000\u0000\u0183\u0181"+ + "\u0001\u0000\u0000\u0000\u0184\u0185\u0007\u0003\u0000\u0000\u0185;\u0001"+ + "\u0000\u0000\u0000\u0186\u0187\u0005P\u0000\u0000\u0187=\u0001\u0000\u0000"+ + "\u0000\u0188\u01b3\u00051\u0000\u0000\u0189\u018a\u0003`0\u0000\u018a"+ + "\u018b\u0005G\u0000\u0000\u018b\u01b3\u0001\u0000\u0000\u0000\u018c\u01b3"+ + "\u0003^/\u0000\u018d\u01b3\u0003`0\u0000\u018e\u01b3\u0003Z-\u0000\u018f"+ + "\u01b3\u0003@ \u0000\u0190\u01b3\u0003b1\u0000\u0191\u0192\u0005E\u0000"+ + "\u0000\u0192\u0197\u0003\\.\u0000\u0193\u0194\u0005&\u0000\u0000\u0194"+ + "\u0196\u0003\\.\u0000\u0195\u0193\u0001\u0000\u0000\u0000\u0196\u0199"+ + 
"\u0001\u0000\u0000\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0197\u0198"+ + "\u0001\u0000\u0000\u0000\u0198\u019a\u0001\u0000\u0000\u0000\u0199\u0197"+ + "\u0001\u0000\u0000\u0000\u019a\u019b\u0005F\u0000\u0000\u019b\u01b3\u0001"+ + "\u0000\u0000\u0000\u019c\u019d\u0005E\u0000\u0000\u019d\u01a2\u0003Z-"+ + "\u0000\u019e\u019f\u0005&\u0000\u0000\u019f\u01a1\u0003Z-\u0000\u01a0"+ + "\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000\u0000\u01a2"+ + "\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000\u0000\u01a3"+ + "\u01a5\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000\u01a5"+ + "\u01a6\u0005F\u0000\u0000\u01a6\u01b3\u0001\u0000\u0000\u0000\u01a7\u01a8"+ + "\u0005E\u0000\u0000\u01a8\u01ad\u0003b1\u0000\u01a9\u01aa\u0005&\u0000"+ + "\u0000\u01aa\u01ac\u0003b1\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac"+ + "\u01af\u0001\u0000\u0000\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad"+ + "\u01ae\u0001\u0000\u0000\u0000\u01ae\u01b0\u0001\u0000\u0000\u0000\u01af"+ + "\u01ad\u0001\u0000\u0000\u0000\u01b0\u01b1\u0005F\u0000\u0000\u01b1\u01b3"+ + "\u0001\u0000\u0000\u0000\u01b2\u0188\u0001\u0000\u0000\u0000\u01b2\u0189"+ + "\u0001\u0000\u0000\u0000\u01b2\u018c\u0001\u0000\u0000\u0000\u01b2\u018d"+ + "\u0001\u0000\u0000\u0000\u01b2\u018e\u0001\u0000\u0000\u0000\u01b2\u018f"+ + "\u0001\u0000\u0000\u0000\u01b2\u0190\u0001\u0000\u0000\u0000\u01b2\u0191"+ + "\u0001\u0000\u0000\u0000\u01b2\u019c\u0001\u0000\u0000\u0000\u01b2\u01a7"+ + "\u0001\u0000\u0000\u0000\u01b3?\u0001\u0000\u0000\u0000\u01b4\u01b7\u0005"+ + "4\u0000\u0000\u01b5\u01b7\u0005D\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000"+ + "\u0000\u01b6\u01b5\u0001\u0000\u0000\u0000\u01b7A\u0001\u0000\u0000\u0000"+ + "\u01b8\u01b9\u0005\n\u0000\u0000\u01b9\u01ba\u0005\u001f\u0000\u0000\u01ba"+ + "C\u0001\u0000\u0000\u0000\u01bb\u01bc\u0005\u0012\u0000\u0000\u01bc\u01c1"+ + "\u0003F#\u0000\u01bd\u01be\u0005&\u0000\u0000\u01be\u01c0\u0003F#\u0000"+ + "\u01bf\u01bd\u0001\u0000\u0000\u0000\u01c0\u01c3\u0001\u0000\u0000\u0000"+ + "\u01c1\u01bf\u0001\u0000\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000"+ + "\u01c2E\u0001\u0000\u0000\u0000\u01c3\u01c1\u0001\u0000\u0000\u0000\u01c4"+ + "\u01c6\u0003\n\u0005\u0000\u01c5\u01c7\u0007\u0004\u0000\u0000\u01c6\u01c5"+ + "\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u01ca"+ + "\u0001\u0000\u0000\u0000\u01c8\u01c9\u00052\u0000\u0000\u01c9\u01cb\u0007"+ + "\u0005\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01ca\u01cb\u0001"+ + "\u0000\u0000\u0000\u01cbG\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005\t"+ + "\u0000\u0000\u01cd\u01ce\u00038\u001c\u0000\u01ceI\u0001\u0000\u0000\u0000"+ + "\u01cf\u01d0\u0005\u0002\u0000\u0000\u01d0\u01d1\u00038\u001c\u0000\u01d1"+ + "K\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u000f\u0000\u0000\u01d3\u01d8"+ + "\u0003N\'\u0000\u01d4\u01d5\u0005&\u0000\u0000\u01d5\u01d7\u0003N\'\u0000"+ + "\u01d6\u01d4\u0001\u0000\u0000\u0000\u01d7\u01da\u0001\u0000\u0000\u0000"+ + "\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000"+ + "\u01d9M\u0001\u0000\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01db"+ + "\u01dc\u00036\u001b\u0000\u01dc\u01dd\u0005T\u0000\u0000\u01dd\u01de\u0003"+ + "6\u001b\u0000\u01deO\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u0001\u0000"+ + "\u0000\u01e0\u01e1\u0003\u0012\t\u0000\u01e1\u01e3\u0003b1\u0000\u01e2"+ + "\u01e4\u0003V+\u0000\u01e3\u01e2\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001"+ + "\u0000\u0000\u0000\u01e4Q\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005\u0007"+ + 
"\u0000\u0000\u01e6\u01e7\u0003\u0012\t\u0000\u01e7\u01e8\u0003b1\u0000"+ + "\u01e8S\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005\u000e\u0000\u0000\u01ea"+ + "\u01eb\u00034\u001a\u0000\u01ebU\u0001\u0000\u0000\u0000\u01ec\u01f1\u0003"+ + "X,\u0000\u01ed\u01ee\u0005&\u0000\u0000\u01ee\u01f0\u0003X,\u0000\u01ef"+ + "\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000\u01f1"+ + "\u01ef\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2"+ + "W\u0001\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5"+ + "\u0003:\u001d\u0000\u01f5\u01f6\u0005$\u0000\u0000\u01f6\u01f7\u0003>"+ + "\u001f\u0000\u01f7Y\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0006\u0000"+ + "\u0000\u01f9[\u0001\u0000\u0000\u0000\u01fa\u01fd\u0003^/\u0000\u01fb"+ + "\u01fd\u0003`0\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fc\u01fb\u0001"+ + "\u0000\u0000\u0000\u01fd]\u0001\u0000\u0000\u0000\u01fe\u0200\u0007\u0000"+ + "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u01ff\u0200\u0001\u0000"+ + "\u0000\u0000\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0005 \u0000"+ + "\u0000\u0202_\u0001\u0000\u0000\u0000\u0203\u0205\u0007\u0000\u0000\u0000"+ + "\u0204\u0203\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000"+ + "\u0205\u0206\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u001f\u0000\u0000"+ + "\u0207a\u0001\u0000\u0000\u0000\u0208\u0209\u0005\u001e\u0000\u0000\u0209"+ + "c\u0001\u0000\u0000\u0000\u020a\u020b\u0007\u0007\u0000\u0000\u020be\u0001"+ + "\u0000\u0000\u0000\u020c\u020d\u0005\u0005\u0000\u0000\u020d\u020e\u0003"+ + "h4\u0000\u020eg\u0001\u0000\u0000\u0000\u020f\u0210\u0005E\u0000\u0000"+ + "\u0210\u0211\u0003\u0002\u0001\u0000\u0211\u0212\u0005F\u0000\u0000\u0212"+ + "i\u0001\u0000\u0000\u0000\u0213\u0214\u0005\u0011\u0000\u0000\u0214\u0215"+ + "\u0005j\u0000\u0000\u0215k\u0001\u0000\u0000\u0000\u0216\u0217\u0005\f"+ + "\u0000\u0000\u0217\u0218\u0005n\u0000\u0000\u0218m\u0001\u0000\u0000\u0000"+ + "\u0219\u021a\u0005\u0003\u0000\u0000\u021a\u021d\u0005Z\u0000\u0000\u021b"+ + "\u021c\u0005X\u0000\u0000\u021c\u021e\u00036\u001b\u0000\u021d\u021b\u0001"+ + "\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u0228\u0001"+ + "\u0000\u0000\u0000\u021f\u0220\u0005Y\u0000\u0000\u0220\u0225\u0003p8"+ + "\u0000\u0221\u0222\u0005&\u0000\u0000\u0222\u0224\u0003p8\u0000\u0223"+ + "\u0221\u0001\u0000\u0000\u0000\u0224\u0227\u0001\u0000\u0000\u0000\u0225"+ + "\u0223\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000\u0226"+ + "\u0229\u0001\u0000\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0228"+ + "\u021f\u0001\u0000\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229"+ + "o\u0001\u0000\u0000\u0000\u022a\u022b\u00036\u001b\u0000\u022b\u022c\u0005"+ + "$\u0000\u0000\u022c\u022e\u0001\u0000\u0000\u0000\u022d\u022a\u0001\u0000"+ + "\u0000\u0000\u022d\u022e\u0001\u0000\u0000\u0000\u022e\u022f\u0001\u0000"+ + "\u0000\u0000\u022f\u0230\u00036\u001b\u0000\u0230q\u0001\u0000\u0000\u0000"+ + "\u0231\u0232\u0005\u000b\u0000\u0000\u0232\u0233\u0003 \u0010\u0000\u0233"+ + "\u0234\u0005X\u0000\u0000\u0234\u0235\u00038\u001c\u0000\u0235s\u0001"+ + "\u0000\u0000\u00006\u007f\u0088\u0098\u00a4\u00ad\u00b5\u00b9\u00c1\u00c3"+ + "\u00c8\u00cf\u00d4\u00db\u00e1\u00e9\u00eb\u00f6\u00fd\u0108\u010b\u0119"+ + "\u0121\u0129\u012d\u0134\u013c\u0144\u0151\u0155\u0159\u0160\u0164\u016a"+ + "\u0171\u0179\u0181\u0197\u01a2\u01ad\u01b2\u01b6\u01c1\u01c6\u01ca\u01d8"+ + "\u01e3\u01f1\u01fc\u01ff\u0204\u021d\u0225\u0228\u022d"; public static final ATN _ATN = new 
ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 0da4c187a3d43..c2c682e0eea17 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -365,13 +365,37 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { * *
<p>The default implementation does nothing.</p>
    */ - @Override public void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } + @Override public void enterIndexPattern(EsqlBaseParser.IndexPatternContext ctx) { } /** * {@inheritDoc} * *
<p>The default implementation does nothing.</p>
    */ - @Override public void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } + @Override public void exitIndexPattern(EsqlBaseParser.IndexPatternContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterClusterString(EsqlBaseParser.ClusterStringContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void exitClusterString(EsqlBaseParser.ClusterStringContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void enterIndexString(EsqlBaseParser.IndexStringContext ctx) { } + /** + * {@inheritDoc} + * + *
<p>The default implementation does nothing.</p>
    + */ + @Override public void exitIndexString(EsqlBaseParser.IndexStringContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index ea1c9aca99880..3b2675d3490a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -221,7 +221,21 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
    */ - @Override public T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { return visitChildren(ctx); } + @Override public T visitIndexPattern(EsqlBaseParser.IndexPatternContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitClusterString(EsqlBaseParser.ClusterStringContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitIndexString(EsqlBaseParser.IndexStringContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 081deb03e8354..a6420e6fadebd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -336,15 +336,35 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Enter a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. + * Enter a parse tree produced by {@link EsqlBaseParser#indexPattern}. * @param ctx the parse tree */ - void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); + void enterIndexPattern(EsqlBaseParser.IndexPatternContext ctx); /** - * Exit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. + * Exit a parse tree produced by {@link EsqlBaseParser#indexPattern}. * @param ctx the parse tree */ - void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); + void exitIndexPattern(EsqlBaseParser.IndexPatternContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#clusterString}. + * @param ctx the parse tree + */ + void enterClusterString(EsqlBaseParser.ClusterStringContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#clusterString}. + * @param ctx the parse tree + */ + void exitClusterString(EsqlBaseParser.ClusterStringContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#indexString}. + * @param ctx the parse tree + */ + void enterIndexString(EsqlBaseParser.IndexStringContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#indexString}. + * @param ctx the parse tree + */ + void exitIndexString(EsqlBaseParser.IndexStringContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#metadata}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index d1ffbd5fa0b32..ec84b7234d67e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -204,11 +204,23 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Visit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. + * Visit a parse tree produced by {@link EsqlBaseParser#indexPattern}. * @param ctx the parse tree * @return the visitor result */ - T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); + T visitIndexPattern(EsqlBaseParser.IndexPatternContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#clusterString}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitClusterString(EsqlBaseParser.ClusterStringContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#indexString}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitIndexString(EsqlBaseParser.IndexStringContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#metadata}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 468f79d6824a1..7b0b1b166af30 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -226,7 +226,7 @@ public Literal visitStringLiteral(EsqlBaseParser.StringLiteralContext ctx) { @Override public Literal visitString(EsqlBaseParser.StringContext ctx) { Source source = source(ctx); - return new Literal(source, unquoteString(source), DataType.KEYWORD); + return new Literal(source, unquote(source), DataType.KEYWORD); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index e626f502f5413..9ccbb00ea4b5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -10,11 +10,12 @@ import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IdentifierContext; -import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IndexIdentifierContext; +import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IndexStringContext; +import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xpack.esql.core.parser.ParserUtils.visitList; +import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR; abstract class IdentifierBuilder extends AbstractBuilder { @@ -23,11 +24,6 @@ public String visitIdentifier(IdentifierContext ctx) { return ctx == null ? null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.UNQUOTED_IDENTIFIER()); } - @Override - public String visitIndexIdentifier(IndexIdentifierContext ctx) { - return ctx == null ? null : unquoteIdentifier(null, ctx.INDEX_UNQUOTED_IDENTIFIER()); - } - protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { String result; if (quotedNode != null) { @@ -42,7 +38,20 @@ protected static String unquoteIdString(String quotedString) { return quotedString.substring(1, quotedString.length() - 1).replace("``", "`"); } - public String visitIndexIdentifiers(List ctx) { - return Strings.collectionToDelimitedString(visitList(this, ctx, String.class), ","); + @Override + public String visitIndexString(IndexStringContext ctx) { + TerminalNode n = ctx.UNQUOTED_SOURCE(); + return n != null ? n.getText() : unquote(ctx.QUOTED_STRING().getText()); + } + + public String visitIndexPattern(List ctx) { + List patterns = new ArrayList<>(ctx.size()); + ctx.forEach(c -> { + String indexPattern = visitIndexString(c.indexString()); + patterns.add( + c.clusterString() != null ? 
c.clusterString().getText() + REMOTE_CLUSTER_INDEX_SEPARATOR + indexPattern : indexPattern + ); + }); + return Strings.collectionToDelimitedString(patterns, ","); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 353e7738fccc3..266a89b9bbf81 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -224,7 +224,7 @@ public LogicalPlan visitRowCommand(EsqlBaseParser.RowCommandContext ctx) { @Override public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); + TableIdentifier table = new TableIdentifier(source, null, visitIndexPattern(ctx.indexPattern())); Map metadataMap = new LinkedHashMap<>(); if (ctx.metadata() != null) { var deprecatedContext = ctx.metadata().deprecated_metadata(); @@ -241,8 +241,8 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { metadataOptionContext = ctx.metadata().metadataOption(); } - for (var c : metadataOptionContext.indexIdentifier()) { - String id = visitIndexIdentifier(c); + for (var c : metadataOptionContext.UNQUOTED_SOURCE()) { + String id = c.getText(); Source src = source(c); if (MetadataAttribute.isSupported(id) == false) { throw new ParsingException(src, "unsupported metadata field [" + id + "]"); @@ -438,7 +438,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) throw new IllegalArgumentException("METRICS command currently requires a snapshot build"); } Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); + TableIdentifier table = new TableIdentifier(source, null, visitIndexPattern(ctx.indexPattern())); if (ctx.aggregates == null && ctx.grouping == null) { return new EsqlUnresolvedRelation(source, table, List.of(), IndexMode.STANDARD); @@ -473,7 +473,7 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { } }); - Literal tableName = new Literal(source, ctx.tableName.getText(), DataType.KEYWORD); + Literal tableName = new Literal(source, visitIndexPattern(List.of(ctx.indexPattern())), DataType.KEYWORD); return p -> new Lookup(source, p, tableName, matchFields, null /* localRelation will be resolved later*/); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 1268ffb64a848..bd4ae4ee53c10 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -71,8 +71,6 @@ //@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class StatementParserTests extends AbstractStatementParserTests { - private static String FROM = "from test"; - public void testRowCommand() { assertEquals( new Row(EMPTY, List.of(new Alias(EMPTY, "a", integer(1)), new Alias(EMPTY, "b", integer(2)))), @@ -335,21 +333,130 @@ public void testInlineStatsWithoutGroups() { ); } - public void testIdentifiersAsIndexPattern() { - // 
assertIdentifierAsIndexPattern("foo", "from `foo`"); - // assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); - assertIdentifierAsIndexPattern("foo,test-*", "from foo,test-*"); - assertIdentifierAsIndexPattern("123-test@foo_bar+baz1", "from 123-test@foo_bar+baz1"); - // assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); - // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); - // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); - assertIdentifierAsIndexPattern("foo,test,xyz", "from foo, test,xyz"); - assertIdentifierAsIndexPattern( - "", // , - "from " // , `` + public void testStringAsIndexPattern() { + for (String command : List.of("FROM", "METRICS")) { + assertStringAsIndexPattern("foo", command + " \"foo\""); + assertStringAsIndexPattern("foo,test-*", command + """ + "foo","test-*" + """); + assertStringAsIndexPattern("foo,test-*", command + " foo,test-*"); + assertStringAsIndexPattern("123-test@foo_bar+baz1", command + " 123-test@foo_bar+baz1"); + assertStringAsIndexPattern("foo,test-*,abc", command + """ + "foo","test-*",abc + """); + assertStringAsIndexPattern("foo, test-*, abc, xyz", command + """ + "foo, test-*, abc, xyz" + """); + assertStringAsIndexPattern("foo, test-*, abc, xyz,test123", command + """ + "foo, test-*, abc, xyz", test123 + """); + assertStringAsIndexPattern("foo,test,xyz", command + " foo, test,xyz"); + assertStringAsIndexPattern( + ",", + command + " , \"\"" + ); + + assertStringAsIndexPattern("foo,test,xyz", command + " \"\"\"foo\"\"\", test,\"xyz\""); + + assertStringAsIndexPattern("`backtick`,``multiple`back``ticks```", command + " `backtick`, ``multiple`back``ticks```"); + + assertStringAsIndexPattern("test,metadata,metaata,.metadata", command + " test,\"metadata\", metaata, .metadata"); + + assertStringAsIndexPattern(".dot", command + " .dot"); + + assertStringAsIndexPattern("cluster:index", command + " cluster:index"); + assertStringAsIndexPattern("cluster:index|pattern", command + " cluster:\"index|pattern\""); + assertStringAsIndexPattern("cluster:.index", command + " cluster:.index"); + assertStringAsIndexPattern("cluster*:index*", command + " cluster*:index*"); + assertStringAsIndexPattern("cluster*:*", command + " cluster*:*"); + assertStringAsIndexPattern("*:index*", command + " *:index*"); + assertStringAsIndexPattern("*:index|pattern", command + " *:\"index|pattern\""); + assertStringAsIndexPattern("*:*", command + " *:*"); + assertStringAsIndexPattern("*:*,cluster*:index|pattern,i|p", command + " *:*, cluster*:\"index|pattern\", \"i|p\""); + } + } + + public void testStringAsLookupIndexPattern() { + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"foo\" ON j"); + assertStringAsLookupIndexPattern("test-*", """ + ROW x = 1 | LOOKUP "test-*" ON j + """); + assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP test-* ON j"); + assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP 123-test@foo_bar+baz1 ON j"); + assertStringAsLookupIndexPattern("foo, test-*, abc, xyz", """ + ROW x = 1 | LOOKUP "foo, test-*, abc, xyz" ON j + """); + assertStringAsLookupIndexPattern("", "ROW x = 1 | LOOKUP ON j"); + assertStringAsLookupIndexPattern( + "", + "ROW x = 1 | LOOKUP \"\" ON j" + ); + + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"\"\"foo\"\"\" ON j"); + + assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP `backtick` ON j"); + 
assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP ``multiple`back``ticks``` ON j"); + + assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP .dot ON j"); + + assertStringAsLookupIndexPattern("cluster:index", "ROW x = 1 | LOOKUP cluster:index ON j"); + assertStringAsLookupIndexPattern("cluster:.index", "ROW x = 1 | LOOKUP cluster:.index ON j"); + assertStringAsLookupIndexPattern("cluster*:index*", "ROW x = 1 | LOOKUP cluster*:index* ON j"); + assertStringAsLookupIndexPattern("cluster*:*", "ROW x = 1 | LOOKUP cluster*:* ON j"); + assertStringAsLookupIndexPattern("*:index*", "ROW x = 1 | LOOKUP *:index* ON j"); + assertStringAsLookupIndexPattern("*:*", "ROW x = 1 | LOOKUP *:* ON j"); + + } + + public void testInvalidQuotingAsFromIndexPattern() { + expectError("FROM \"foo", ": token recognition error at: '\"foo'"); + expectError("FROM \"foo | LIMIT 1", ": token recognition error at: '\"foo | LIMIT 1'"); + expectError("FROM \"\"\"foo", ": token recognition error at: '\"foo'"); + + expectError("FROM foo\"", ": token recognition error at: '\"'"); + expectError("FROM foo\" | LIMIT 2", ": token recognition error at: '\" | LIMIT 2'"); + expectError("FROM foo\"\"\"", ": token recognition error at: '\"'"); + + expectError("FROM \"foo\"bar\"", ": token recognition error at: '\"'"); + expectError("FROM \"foo\"\"bar\"", ": extraneous input '\"bar\"' expecting "); + + expectError("FROM \"\"\"foo\"\"\"bar\"\"\"", ": mismatched input 'bar' expecting {, '|', ',', OPENING_BRACKET, 'metadata'}"); + expectError( + "FROM \"\"\"foo\"\"\"\"\"\"bar\"\"\"", + ": mismatched input '\"bar\"' expecting {, '|', ',', OPENING_BRACKET, 'metadata'}" ); } + public void testInvalidQuotingAsMetricsIndexPattern() { + expectError("METRICS \"foo", ": token recognition error at: '\"foo'"); + expectError("METRICS \"foo | LIMIT 1", ": token recognition error at: '\"foo | LIMIT 1'"); + expectError("METRICS \"\"\"foo", ": token recognition error at: '\"'"); + + expectError("METRICS foo\"", ": token recognition error at: '\"'"); + expectError("METRICS foo\" | LIMIT 2", ": token recognition error at: '\"'"); + expectError("METRICS foo\"\"\"", ": token recognition error at: '\"'"); + + expectError("METRICS \"foo\"bar\"", ": token recognition error at: '\"'"); + expectError("METRICS \"foo\"\"bar\"", ": token recognition error at: '\"'"); + + expectError("METRICS \"\"\"foo\"\"\"bar\"\"\"", ": token recognition error at: '\"'"); + expectError("METRICS \"\"\"foo\"\"\"\"\"\"bar\"\"\"", ": token recognition error at: '\"'"); + } + + public void testInvalidQuotingAsLookupIndexPattern() { + expectError("ROW x = 1 | LOOKUP \"foo ON j", ": token recognition error at: '\"foo ON j'"); + expectError("ROW x = 1 | LOOKUP \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'"); + + expectError("ROW x = 1 | LOOKUP foo\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP foo\"\"\" ON j", ": token recognition error at: '\" ON j'"); + + expectError("ROW x = 1 | LOOKUP \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'"); + + expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", "line 1:31: mismatched input '\"bar\"' expecting 'on'"); + } + public void testIdentifierAsFieldName() { String[] operators = new String[] { "==", "!=", ">", "<", ">=", 
"<=" }; Class[] expectedOperators = new Class[] { @@ -1166,11 +1273,18 @@ public void testQuotedName() { assertThat(Expressions.names(project.projections()), contains("count(`my-field`)")); } - private void assertIdentifierAsIndexPattern(String identifier, String statement) { + private void assertStringAsIndexPattern(String string, String statement) { LogicalPlan from = statement(statement); assertThat(from, instanceOf(EsqlUnresolvedRelation.class)); EsqlUnresolvedRelation table = (EsqlUnresolvedRelation) from; - assertThat(table.table().index(), is(identifier)); + assertThat(table.table().index(), is(string)); + } + + private void assertStringAsLookupIndexPattern(String string, String statement) { + var plan = statement(statement); + var lookup = as(plan, Lookup.class); + var tableName = as(lookup.tableName(), Literal.class); + assertThat(tableName.fold(), equalTo(string)); } public void testIdPatternUnquoted() throws Exception { From 21fb5afe67f7a9f5159676d855de7f137f87cb58 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 1 Jul 2024 06:54:04 +0100 Subject: [PATCH 065/216] Improve refcounting in `testClientCancellation` (#110309) With the changes in #109519 we now do one more async step while serving the response, so we need to acquire another ref to track the new step. Relates #109866 Relates #110118 Relates #110175 Relates #110249 --- .../netty4/Netty4ChunkedContinuationsIT.java | 24 +++++++------------ 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java index 4b6c820638b40..c4c35b410af78 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4ChunkedContinuationsIT.java @@ -72,10 +72,8 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.test.MockLog; -import org.elasticsearch.test.ReachabilityChecker; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.netty4.Netty4Utils; import org.elasticsearch.xcontent.ToXContentObject; @@ -317,20 +315,14 @@ public void onFailure(Exception exception) { private static Releasable withResourceTracker() { assertNull(refs); - final ReachabilityChecker reachabilityChecker = new ReachabilityChecker(); final var latch = new CountDownLatch(1); - refs = LeakTracker.wrap(reachabilityChecker.register(AbstractRefCounted.of(latch::countDown))); + refs = AbstractRefCounted.of(latch::countDown); return () -> { refs.decRef(); - boolean success = false; try { safeAwait(latch); - success = true; } finally { refs = null; - if (success == false) { - reachabilityChecker.ensureUnreachable(); - } } }; } @@ -643,14 +635,11 @@ public void close() { @Override public void accept(RestChannel channel) { - client.execute(TYPE, new Request(), new RestActionListener<>(channel) { + localRefs.mustIncRef(); + client.execute(TYPE, new Request(), ActionListener.releaseAfter(new RestActionListener<>(channel) { @Override protected void processResponse(Response response) { - // incRef can fail if the request was 
already cancelled - if (localRefs.tryIncRef() == false) { - assert localRefs.hasReferences() == false : "tryIncRef failed but RefCounted not completed"; - return; - } + localRefs.mustIncRef(); channel.sendResponse(RestResponse.chunked(RestStatus.OK, response.getResponseBodyPart(), () -> { // cancellation notification only happens while processing a continuation, not while computing // the next one; prompt cancellation requires use of something like RestCancellableNodeClient @@ -659,7 +648,10 @@ protected void processResponse(Response response) { localRefs.decRef(); })); } - }); + }, () -> { + assertSame(localRefs, refs); + localRefs.decRef(); + })); } }; } else { From e3e44af4cfed5ab07edecaac1f649a940ee095fe Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 1 Jul 2024 08:27:14 +0200 Subject: [PATCH 066/216] Remove InternalAggregations#asMap and InternalAggregations#getAsMap methods (#110250) These methods are not needed as we have InternalAggregations#get to find aggregations by name. --- .../ChildrenToParentAggregatorTests.java | 2 +- .../SignificantTermsSignificanceScoreIT.java | 13 ++++++------- .../aggregations/metrics/ExtendedStatsIT.java | 2 +- .../metrics/HDRPercentileRanksIT.java | 2 +- .../aggregations/metrics/HDRPercentilesIT.java | 2 +- .../metrics/MedianAbsoluteDeviationIT.java | 2 +- .../aggregations/metrics/ScriptedMetricIT.java | 2 +- .../search/aggregations/metrics/StatsIT.java | 2 +- .../search/aggregations/metrics/SumIT.java | 2 +- .../metrics/TDigestPercentileRanksIT.java | 2 +- .../metrics/TDigestPercentilesIT.java | 2 +- .../search/aggregations/metrics/TopHitsIT.java | 2 +- .../aggregations/metrics/ValueCountIT.java | 2 +- .../search/functionscore/FunctionScoreIT.java | 7 ++----- .../aggregations/InternalAggregations.java | 12 +----------- .../bucket/nested/NestedAggregatorTests.java | 4 ++-- .../metrics/CardinalityAggregatorTests.java | 2 +- .../aggregations/metrics/MaxAggregatorTests.java | 4 ++-- .../aggregations/metrics/MinAggregatorTests.java | 16 ++++++++-------- .../SharedSignificantTermsTestMethods.java | 9 ++++----- .../metrics/CentroidAggregationTestBase.java | 2 +- .../SpatialBoundsAggregationTestBase.java | 2 +- .../boxplot/BoxplotAggregatorTests.java | 16 ++++++++-------- .../analytics/ttest/TTestAggregatorTests.java | 16 ++++++++-------- .../DownsampleActionSingleNodeTests.java | 15 +++++++++------ .../InternalCategorizationAggregation.java | 2 +- .../ml/job/persistence/JobResultsProvider.java | 16 ++++------------ .../xpack/rollup/RollupResponseTranslator.java | 14 +++++++------- 28 files changed, 77 insertions(+), 97 deletions(-) diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index c82ba3f5f26b5..5a5ccb640f03d 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -68,7 +68,7 @@ public void testNoDocs() throws IOException { assertEquals(0, childrenToParent.getDocCount()); Aggregation parentAggregation = childrenToParent.getAggregations().get("in_parent"); assertEquals(0, childrenToParent.getDocCount()); - assertNotNull("Aggregations: " + childrenToParent.getAggregations().asMap(), parentAggregation); + assertNotNull("Aggregations: " + childrenToParent.getAggregations().asList(),
parentAggregation); assertEquals(Double.POSITIVE_INFINITY, ((Min) parentAggregation).value(), Double.MIN_VALUE); assertFalse(JoinAggregationInspectionHelper.hasValue(childrenToParent)); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 21a607f113f14..35a117ac8922b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -20,7 +20,6 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms; @@ -136,8 +135,8 @@ public void testXContentResponse() throws Exception { StringTerms classes = response.getAggregations().get("class"); assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); - assertTrue(aggs.containsKey("sig_terms")); + InternalAggregations aggs = classBucket.getAggregations(); + assertNotNull(aggs.get("sig_terms")); SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); assertThat(agg.getBuckets().size(), equalTo(1)); String term = agg.iterator().next().getKeyAsString(); @@ -323,21 +322,21 @@ public void testBackgroundVsSeparateSet( assertNoFailuresAndResponse(request1, response1 -> assertNoFailuresAndResponse(request2, response2 -> { StringTerms classes = response1.getAggregations().get("class"); - SignificantTerms sigTerms0 = ((SignificantTerms) (classes.getBucketByKey("0").getAggregations().asMap().get("sig_terms"))); + SignificantTerms sigTerms0 = classes.getBucketByKey("0").getAggregations().get("sig_terms"); assertThat(sigTerms0.getBuckets().size(), equalTo(2)); double score00Background = sigTerms0.getBucketByKey("0").getSignificanceScore(); double score01Background = sigTerms0.getBucketByKey("1").getSignificanceScore(); - SignificantTerms sigTerms1 = ((SignificantTerms) (classes.getBucketByKey("1").getAggregations().asMap().get("sig_terms"))); + SignificantTerms sigTerms1 = classes.getBucketByKey("1").getAggregations().get("sig_terms"); double score10Background = sigTerms1.getBucketByKey("0").getSignificanceScore(); double score11Background = sigTerms1.getBucketByKey("1").getSignificanceScore(); InternalAggregations aggs = response2.getAggregations(); - sigTerms0 = (SignificantTerms) ((InternalFilter) aggs.get("0")).getAggregations().getAsMap().get("sig_terms"); + sigTerms0 = ((InternalFilter) aggs.get("0")).getAggregations().get("sig_terms"); double score00SeparateSets = sigTerms0.getBucketByKey("0").getSignificanceScore(); double score01SeparateSets = sigTerms0.getBucketByKey("1").getSignificanceScore(); - sigTerms1 = (SignificantTerms) ((InternalFilter) aggs.get("1")).getAggregations().getAsMap().get("sig_terms"); + sigTerms1 = ((InternalFilter) aggs.get("1")).getAggregations().get("sig_terms"); double score10SeparateSets = 
sigTerms1.getBucketByKey("0").getSignificanceScore(); double score11SeparateSets = sigTerms1.getBucketByKey("1").getSignificanceScore(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index e15ad15bb4e3a..750868defde97 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -290,7 +290,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); ExtendedStats stats = global.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 3f5d8e441dc44..13d66a5cf3949 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -222,7 +222,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); PercentileRanks values = global.getAggregations().get("percentile_ranks"); assertThat(values, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 0dbc811a7debc..cd69fb8241ef2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -199,7 +199,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); Percentiles percentiles = global.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index 06f43416eb03a..6c80931914ac6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -185,7 +185,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), is("global")); assertThat(global.getDocCount(), is((long) NUMBER_OF_DOCS)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().entrySet(), hasSize(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); final MedianAbsoluteDeviation mad = global.getAggregations().get("mad"); assertThat(mad, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 02c45c4aade1b..96f6002f0d490 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -722,7 +722,7 @@ public void testInitMapCombineReduceGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(numDocs)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); ScriptedMetric scriptedMetricAggregation = global.getAggregations().get("scripted"); assertThat(scriptedMetricAggregation, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index f97d886ae8df6..84e0bee396c9d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -127,7 +127,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); Stats stats = global.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 37524dabe7f09..d50c101dbd5d1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -140,7 +140,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); Sum sum = global.getAggregations().get("sum"); assertThat(sum, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 47c443a58eeda..9c737cb734f16 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -190,7 +190,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); PercentileRanks values = global.getAggregations().get("percentile_ranks"); assertThat(values, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index 98086451c3456..1c101324cd5fc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -176,7 +176,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); Percentiles percentiles = global.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 991fe98612e3d..fc753b0844c46 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -461,7 +461,7 @@ public void testBasicsGetProperty() throws Exception { assertThat(global, notNullValue()); assertThat(global.getName(), equalTo("global")); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); TopHits topHits = global.getAggregations().get("hits"); assertThat(topHits, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 7c5ab6600e365..c3feff6f3eaaa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -99,7 +99,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo(10L)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + 
assertThat(global.getAggregations().asList().size(), equalTo(1)); ValueCount valueCount = global.getAggregations().get("count"); assertThat(valueCount, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index bcecc49c2d463..422d6f06f2988 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -116,11 +116,8 @@ public void testScriptScoresWithAgg() throws Exception { ), response -> { assertThat(response.getHits().getAt(0).getScore(), equalTo(1.0f)); - assertThat( - ((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsString(), - equalTo("1.0") - ); - assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L)); + assertThat(((Terms) response.getAggregations().get("score_agg")).getBuckets().get(0).getKeyAsString(), equalTo("1.0")); + assertThat(((Terms) response.getAggregations().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L)); } ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index 07e72404eefe9..4f234c33b13a6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -73,17 +73,7 @@ public List asList() { return aggregations; } - /** - * Returns the {@link InternalAggregation}s keyed by aggregation name. - */ - public Map asMap() { - return getAsMap(); - } - - /** - * Returns the {@link InternalAggregation}s keyed by aggregation name. 
- */ - public Map getAsMap() { + private Map asMap() { if (aggregationsAsMap == null) { Map newAggregationsAsMap = Maps.newMapWithExpectedSize(aggregations.size()); for (InternalAggregation aggregation : aggregations) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 4ec2e5ab49cd3..91078c9babe3d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -766,8 +766,8 @@ public void testNestedWithPipeline() throws IOException { assertNotNull(terms); for (LongTerms.Bucket bucket : terms.getBuckets()) { - Max max = (Max) bucket.getAggregations().asMap().get(MAX_AGG_NAME); - InternalSimpleValue bucketScript = (InternalSimpleValue) bucket.getAggregations().asMap().get("bucketscript"); + Max max = (Max) bucket.getAggregations().get(MAX_AGG_NAME); + InternalSimpleValue bucketScript = (InternalSimpleValue) bucket.getAggregations().get("bucketscript"); assertNotNull(max); assertNotNull(bucketScript); assertEquals(max.value(), -bucketScript.getValue(), Double.MIN_VALUE); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java index 6efcb6c2b99e2..aea87b3394525 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java @@ -768,7 +768,7 @@ public void testSingleValuedFieldGlobalAggregation() throws IOException { assertEquals("global", global.getName()); assertEquals(numDocs * 2, global.getDocCount()); assertNotNull(global.getAggregations()); - assertEquals(1, global.getAggregations().asMap().size()); + assertEquals(1, global.getAggregations().asList().size()); final Cardinality cardinality = global.getAggregations().get("cardinality"); assertNotNull(cardinality); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java index 50e653d7e5216..5cf7b2f82b940 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -440,7 +440,7 @@ public void testSingleValuedFieldGetProperty() throws IOException { assertEquals("global", global.getName()); assertEquals(10L, global.getDocCount()); assertNotNull(global.getAggregations()); - assertEquals(1, global.getAggregations().asMap().size()); + assertEquals(1, global.getAggregations().asList().size()); Max max = global.getAggregations().get("max"); assertNotNull(max); @@ -651,7 +651,7 @@ public void testEmptyAggregation() throws Exception { assertEquals("global", global.getName()); assertEquals(0L, global.getDocCount()); assertNotNull(global.getAggregations()); - assertEquals(1, global.getAggregations().asMap().size()); + assertEquals(1, global.getAggregations().asList().size()); Max max = global.getAggregations().get("max"); assertNotNull(max); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java index def58da97c7ca..79ccdceb00613 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java @@ -297,18 +297,18 @@ public void testEmptyBucket() throws IOException { }, (Consumer) histo -> { assertThat(histo.getBuckets().size(), equalTo(3)); - assertNotNull(histo.getBuckets().get(0).getAggregations().asMap().get("min")); - Min min = (Min) histo.getBuckets().get(0).getAggregations().asMap().get("min"); + assertNotNull(histo.getBuckets().get(0).getAggregations().get("min")); + Min min = (Min) histo.getBuckets().get(0).getAggregations().get("min"); assertEquals(1.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); - assertNotNull(histo.getBuckets().get(1).getAggregations().asMap().get("min")); - min = (Min) histo.getBuckets().get(1).getAggregations().asMap().get("min"); + assertNotNull(histo.getBuckets().get(1).getAggregations().get("min")); + min = (Min) histo.getBuckets().get(1).getAggregations().get("min"); assertEquals(Double.POSITIVE_INFINITY, min.value(), 0); assertFalse(AggregationInspectionHelper.hasValue(min)); - assertNotNull(histo.getBuckets().get(2).getAggregations().asMap().get("min")); - min = (Min) histo.getBuckets().get(2).getAggregations().asMap().get("min"); + assertNotNull(histo.getBuckets().get(2).getAggregations().get("min")); + min = (Min) histo.getBuckets().get(2).getAggregations().get("min"); assertEquals(3.0, min.value(), 0); assertTrue(AggregationInspectionHelper.hasValue(min)); @@ -343,9 +343,9 @@ public void testGetProperty() throws IOException { }, (Consumer) global -> { assertEquals(2, global.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(global)); - assertNotNull(global.getAggregations().asMap().get("min")); + assertNotNull(global.getAggregations().get("min")); - Min min = (Min) global.getAggregations().asMap().get("min"); + Min min = (Min) global.getAggregations().get("min"); assertEquals(1.0, min.value(), 0); assertThat(global.getProperty("min"), equalTo(min)); assertThat(global.getProperty("min.value"), equalTo(1.0)); diff --git a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 93dd7bc618756..49314c5c2deab 100644 --- a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -9,7 +9,7 @@ package org.elasticsearch.test.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.concurrent.ExecutionException; import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; @@ -55,9 +54,9 @@ private static void 
checkSignificantTermsAggregationCorrect(ESIntegTestCase test StringTerms classes = response.getAggregations().get("class"); Assert.assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); - Assert.assertTrue(aggs.containsKey("sig_terms")); - SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); + InternalAggregations aggs = classBucket.getAggregations(); + Assert.assertNotNull(aggs.get("sig_terms")); + SignificantTerms agg = aggs.get("sig_terms"); Assert.assertThat(agg.getBuckets().size(), equalTo(1)); SignificantTerms.Bucket sigBucket = agg.iterator().next(); String term = sigBucket.getKeyAsString(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java index 664590d65c818..23b17e8f3f163 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java @@ -102,7 +102,7 @@ public void testSingleValueFieldGetProperty() { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo((long) numDocs)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); CentroidAggregation geoCentroid = global.getAggregations().get(aggName()); InternalAggregation agg = (InternalAggregation) global; diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java index 81c9c37ad4f9a..9dae49f658211 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java @@ -67,7 +67,7 @@ public void testSingleValuedField_getProperty() { assertThat(global.getName(), equalTo("global")); assertThat(global.getDocCount(), equalTo((long) numDocs)); assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); + assertThat(global.getAggregations().asList().size(), equalTo(1)); SpatialBounds geobounds = global.getAggregations().get(aggName()); assertThat(geobounds, notNullValue()); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java index 7ab1f555a6529..297dbf5233922 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregatorTests.java @@ -270,24 +270,24 @@ public void testEmptyBucket() throws IOException { }, (Consumer) histo -> { assertThat(histo.getBuckets().size(), equalTo(3)); - assertNotNull(histo.getBuckets().get(0).getAggregations().asMap().get("boxplot")); - InternalBoxplot boxplot = (InternalBoxplot) 
histo.getBuckets().get(0).getAggregations().asMap().get("boxplot"); + assertNotNull(histo.getBuckets().get(0).getAggregations().get("boxplot")); + InternalBoxplot boxplot = histo.getBuckets().get(0).getAggregations().get("boxplot"); assertEquals(1, boxplot.getMin(), 0); assertEquals(3, boxplot.getMax(), 0); assertEquals(1.5, boxplot.getQ1(), 0); assertEquals(2, boxplot.getQ2(), 0); assertEquals(2.5, boxplot.getQ3(), 0); - assertNotNull(histo.getBuckets().get(1).getAggregations().asMap().get("boxplot")); - boxplot = (InternalBoxplot) histo.getBuckets().get(1).getAggregations().asMap().get("boxplot"); + assertNotNull(histo.getBuckets().get(1).getAggregations().get("boxplot")); + boxplot = histo.getBuckets().get(1).getAggregations().get("boxplot"); assertEquals(Double.POSITIVE_INFINITY, boxplot.getMin(), 0); assertEquals(Double.NEGATIVE_INFINITY, boxplot.getMax(), 0); assertEquals(Double.NaN, boxplot.getQ1(), 0); assertEquals(Double.NaN, boxplot.getQ2(), 0); assertEquals(Double.NaN, boxplot.getQ3(), 0); - assertNotNull(histo.getBuckets().get(2).getAggregations().asMap().get("boxplot")); - boxplot = (InternalBoxplot) histo.getBuckets().get(2).getAggregations().asMap().get("boxplot"); + assertNotNull(histo.getBuckets().get(2).getAggregations().get("boxplot")); + boxplot = histo.getBuckets().get(2).getAggregations().get("boxplot"); assertEquals(21, boxplot.getMin(), 0); assertEquals(23, boxplot.getMax(), 0); assertEquals(21.5, boxplot.getQ1(), 0); @@ -337,8 +337,8 @@ public void testGetProperty() throws IOException { }, (Consumer) global -> { assertEquals(5, global.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(global)); - assertNotNull(global.getAggregations().asMap().get("boxplot")); - InternalBoxplot boxplot = (InternalBoxplot) global.getAggregations().asMap().get("boxplot"); + assertNotNull(global.getAggregations().get("boxplot")); + InternalBoxplot boxplot = global.getAggregations().get("boxplot"); assertThat(global.getProperty("boxplot"), equalTo(boxplot)); assertThat(global.getProperty("boxplot.min"), equalTo(1.0)); assertThat(global.getProperty("boxplot.max"), equalTo(5.0)); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java index 26c71b8af5102..dcee4d4b05c21 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregatorTests.java @@ -410,20 +410,20 @@ public void testEmptyBucket() throws IOException { ); }, (Consumer) histo -> { assertEquals(3, histo.getBuckets().size()); - assertNotNull(histo.getBuckets().get(0).getAggregations().asMap().get("t_test")); - InternalTTest tTest = (InternalTTest) histo.getBuckets().get(0).getAggregations().asMap().get("t_test"); + assertNotNull(histo.getBuckets().get(0).getAggregations().get("t_test")); + InternalTTest tTest = histo.getBuckets().get(0).getAggregations().get("t_test"); assertEquals( tTestType == TTestType.PAIRED ? 0.1939778614 : tTestType == TTestType.HOMOSCEDASTIC ? 
0.05878871029 : 0.07529006595, tTest.getValue(), 0.000001 ); - assertNotNull(histo.getBuckets().get(1).getAggregations().asMap().get("t_test")); - tTest = (InternalTTest) histo.getBuckets().get(1).getAggregations().asMap().get("t_test"); + assertNotNull(histo.getBuckets().get(1).getAggregations().get("t_test")); + tTest = histo.getBuckets().get(1).getAggregations().get("t_test"); assertEquals(Double.NaN, tTest.getValue(), 0.000001); - assertNotNull(histo.getBuckets().get(2).getAggregations().asMap().get("t_test")); - tTest = (InternalTTest) histo.getBuckets().get(2).getAggregations().asMap().get("t_test"); + assertNotNull(histo.getBuckets().get(2).getAggregations().get("t_test")); + tTest = histo.getBuckets().get(2).getAggregations().get("t_test"); assertEquals( tTestType == TTestType.PAIRED ? 0.6666666667 : tTestType == TTestType.HOMOSCEDASTIC ? 0.8593081179 : 0.8594865044, tTest.getValue(), @@ -475,8 +475,8 @@ public void testGetProperty() throws IOException { }, (Consumer) global -> { assertEquals(3, global.getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(global)); - assertNotNull(global.getAggregations().asMap().get("t_test")); - InternalTTest tTest = (InternalTTest) global.getAggregations().asMap().get("t_test"); + assertNotNull(global.getAggregations().get("t_test")); + InternalTTest tTest = global.getAggregations().get("t_test"); assertEquals(tTest, global.getProperty("t_test")); assertEquals(0.1939778614, (Double) global.getProperty("t_test.value"), 0.000001); }, new AggTestConfig(globalBuilder, fieldType1, fieldType2)); diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 5012bacf319b6..e4db5a253f996 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -1270,12 +1270,15 @@ private void assertDownsampleIndexAggregations( Map labelFields ) { final AggregationBuilder aggregations = buildAggregations(config, metricFields, labelFields, config.getTimestampField()); - InternalAggregations origResp = aggregate(sourceIndex, aggregations); - InternalAggregations downsampleResp = aggregate(downsampleIndex, aggregations); - assertEquals(origResp.asMap().keySet(), downsampleResp.asMap().keySet()); + List origList = aggregate(sourceIndex, aggregations).asList(); + List downsampleList = aggregate(downsampleIndex, aggregations).asList(); + assertEquals(origList.size(), downsampleList.size()); + for (int i = 0; i < origList.size(); i++) { + assertEquals(origList.get(i).getName(), downsampleList.get(i).getName()); + } - StringTerms originalTsIdTermsAggregation = (StringTerms) origResp.getAsMap().values().stream().toList().get(0); - StringTerms downsampleTsIdTermsAggregation = (StringTerms) downsampleResp.getAsMap().values().stream().toList().get(0); + StringTerms originalTsIdTermsAggregation = (StringTerms) origList.get(0); + StringTerms downsampleTsIdTermsAggregation = (StringTerms) downsampleList.get(0); originalTsIdTermsAggregation.getBuckets().forEach(originalBucket -> { StringTerms.Bucket downsampleBucket = downsampleTsIdTermsAggregation.getBucketByKey(originalBucket.getKeyAsString()); @@ -1318,7 +1321,7 @@ private void assertDownsampleIndexAggregations( .stream() .filter(agg -> 
agg.getType().equals("top_hits")) .toList(); - assertEquals(topHitsDownsampleAggregations.size(), topHitsDownsampleAggregations.size()); + assertEquals(topHitsOriginalAggregations.size(), topHitsDownsampleAggregations.size()); for (int j = 0; j < topHitsDownsampleAggregations.size(); ++j) { InternalTopHits originalTopHits = (InternalTopHits) topHitsOriginalAggregations.get(j); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 83338453050e0..7ef7a8f4e6dd5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -198,7 +198,7 @@ public String toString() { + ", docCount=" + serializableCategory.getNumMatches() + ", aggregations=" - + aggregations.asMap() + + aggregations.asList() + "}\n"; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 50342a7bf99e0..f9e4e62e4e3bc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -74,7 +74,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator; -import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.metrics.TopHits; @@ -1816,20 +1815,13 @@ public void getForecastStats( handler.accept(new ForecastStats()); return; } - Map aggregationsAsMap = aggregations.asMap(); - StatsAccumulator memoryStats = StatsAccumulator.fromStatsAggregation( - (Stats) aggregationsAsMap.get(ForecastStats.Fields.MEMORY) - ); - Stats aggRecordsStats = (Stats) aggregationsAsMap.get(ForecastStats.Fields.RECORDS); + StatsAccumulator memoryStats = StatsAccumulator.fromStatsAggregation(aggregations.get(ForecastStats.Fields.MEMORY)); + Stats aggRecordsStats = aggregations.get(ForecastStats.Fields.RECORDS); // Stats already gives us all the counts and every doc as a "records" field. 
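// InternalAggregations#get(String) is generic over the returned aggregation type, so
// the former asMap() lookups with explicit (Stats) and (StringTerms) casts are no
// longer needed; the assignment target drives the type inference.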
long totalHits = aggRecordsStats.getCount(); StatsAccumulator recordStats = StatsAccumulator.fromStatsAggregation(aggRecordsStats); - StatsAccumulator runtimeStats = StatsAccumulator.fromStatsAggregation( - (Stats) aggregationsAsMap.get(ForecastStats.Fields.RUNTIME) - ); - CountAccumulator statusCount = CountAccumulator.fromTermsAggregation( - (StringTerms) aggregationsAsMap.get(ForecastStats.Fields.STATUSES) - ); + StatsAccumulator runtimeStats = StatsAccumulator.fromStatsAggregation(aggregations.get(ForecastStats.Fields.RUNTIME)); + CountAccumulator statusCount = CountAccumulator.fromTermsAggregation(aggregations.get(ForecastStats.Fields.STATUSES)); ForecastStats forecastStats = new ForecastStats(totalHits, memoryStats, recordStats, runtimeStats, statusCount); handler.accept(forecastStats); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index 987e3e99bb91a..ba25a774ff540 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -381,7 +381,7 @@ private static List<InternalAggregation> unrollAgg( // long count = -1; if (agg instanceof InternalMultiBucketAggregation == false) { - count = getAggCount(agg, rolled.getAsMap()); + count = getAggCount(agg, rolled); } return unrollAgg(agg, original.get(agg.getName()), currentTree.get(agg.getName()), count); @@ -522,7 +522,7 @@ T extends InternalMultiBucketAggregation> InternalAggregation unrollMultiBucket( .map(bucket -> { // Grab the value from the count agg (if it exists), which represents this bucket's doc_count - long bucketCount = getAggCount(source, bucket.getAggregations().getAsMap()); + long bucketCount = getAggCount(source, bucket.getAggregations()); // Don't generate buckets if the doc count is zero if (bucketCount == 0) { @@ -566,7 +566,7 @@ private static InternalAggregations unrollSubAggsFromMulti(InternalBucket bucket .filter(subAgg -> subAgg.getName().endsWith("." + RollupField.COUNT_FIELD) == false) .map(subAgg -> { - long count = getAggCount(subAgg, bucket.getAggregations().asMap()); + long count = getAggCount(subAgg, bucket.getAggregations()); InternalAggregation originalSubAgg = null; if (original != null && original.getAggregations() != null) { @@ -617,7 +617,7 @@ private static InternalAggregation unrollMetric(SingleValue metric, long count) } } - private static long getAggCount(Aggregation agg, Map<String, Aggregation> aggMap) { + private static long getAggCount(Aggregation agg, InternalAggregations aggregations) { String countPath = null; if (agg.getType().equals(DateHistogramAggregationBuilder.NAME) @@ -630,10 +630,10 @@ private static long getAggCount(Aggregation agg, Map<String, Aggregation> aggMap) {

Date: Mon, 1 Jul 2024 10:24:17 +0300 Subject: [PATCH 067/216] Support index sorting with nested fields (#110251) This PR piggy-backs on recent changes in Lucene 9.11.1 (https://github.com/apache/lucene/pull/12829, https://github.com/apache/lucene/pull/13341/), configuring the parent document field on the index writer when nested fields are present. This allows nested documents to move along with their parent documents during sorting. With this change, sorting is now allowed on fields outside nested objects. Sorting on fields within nested objects is still not supported (throws an exception). 
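For reference, the Lucene-side mechanics look roughly like the sketch below. This is illustrative only, not code from this PR: the field names and the in-memory directory are arbitrary, and the parent field name mirrors the `ROOT_DOC_FIELD_NAME` constant that the engine change introduces.

```
import java.util.List;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.BytesRef;

public class SortedNestedSketch {
    public static void main(String[] args) throws Exception {
        IndexWriterConfig iwc = new IndexWriterConfig();
        // Sort segments by "name"; on its own this can tear apart parent/child doc blocks.
        iwc.setIndexSort(new Sort(new SortField("name", SortField.Type.STRING)));
        // Recording each block's parent in a dedicated field lets the sorter keep
        // child documents contiguous with (and before) their parent.
        iwc.setParentField("__root_doc_for_nested");
        try (IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), iwc)) {
            Document child = new Document();
            child.add(new StringField("nested_field.a", "1", Field.Store.NO));
            Document parent = new Document();
            parent.add(new SortedDocValuesField("name", new BytesRef("aaaa")));
            // Documents added as one block: children first, parent last.
            writer.addDocuments(List.of(child, parent));
        }
    }
}
```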
Fixes #107349 --- docs/changelog/110251.yaml | 13 +++ .../index-modules/index-sorting.asciidoc | 5 +- .../test/indices.sort/20_nested.yml | 99 +++++++++++++++++++ .../rest-api-spec/test/logsdb/10_settings.yml | 10 +- .../org/elasticsearch/index/IndexService.java | 3 +- .../elasticsearch/index/IndexVersions.java | 1 + .../elasticsearch/index/engine/Engine.java | 1 + .../index/engine/InternalEngine.java | 5 + .../index/mapper/DocumentMapper.java | 19 +++- .../index/mapper/MapperFeatures.java | 3 +- .../index/shard/StoreRecovery.java | 5 + .../org/elasticsearch/index/store/Store.java | 26 +++-- .../index/mapper/MapperServiceTests.java | 35 ++++++- .../elasticsearch/index/store/StoreTests.java | 2 +- .../SourceOnlySnapshotRepository.java | 2 +- 15 files changed, 204 insertions(+), 25 deletions(-) create mode 100644 docs/changelog/110251.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml diff --git a/docs/changelog/110251.yaml b/docs/changelog/110251.yaml new file mode 100644 index 0000000000000..a3b0c3128be35 --- /dev/null +++ b/docs/changelog/110251.yaml @@ -0,0 +1,13 @@ +pr: 110251 +summary: Support index sorting with nested fields +area: Logs +type: enhancement +issues: + - 107349 +highlight: + title: Index sorting on indexes with nested fields + body: |- + Index sorting is now supported for indexes with mappings containing nested objects. + The index sort spec (as specified by `index.sort.field`) can't contain any nested + fields, still. + notable: false diff --git a/docs/reference/index-modules/index-sorting.asciidoc b/docs/reference/index-modules/index-sorting.asciidoc index dd355eccbca2a..1334a96872459 100644 --- a/docs/reference/index-modules/index-sorting.asciidoc +++ b/docs/reference/index-modules/index-sorting.asciidoc @@ -6,9 +6,8 @@ inside each Shard will be sorted. By default Lucene does not apply any sort. The `index.sort.*` settings define which fields should be used to sort the documents inside each Segment. [WARNING] -nested fields are not compatible with index sorting because they rely on the assumption -that nested documents are stored in contiguous doc ids, which can be broken by index sorting. -An error will be thrown if index sorting is activated on an index that contains nested fields. +It is allowed to apply index sorting to mappings with nested objects, so long as the +`index.sort.*` setting contains no nested fields. 
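A minimal sketch of the newly allowed combination (the index and field names here are arbitrary):

[source,console]
--------------------------------------------------
PUT my-index
{
  "settings": {
    "index.sort.field": "name"
  },
  "mappings": {
    "properties": {
      "name": { "type": "keyword" },
      "comments": { "type": "nested" }
    }
  }
}
--------------------------------------------------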
For instance the following example shows how to define a sort on a single field: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml new file mode 100644 index 0000000000000..547ff096822c0 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml @@ -0,0 +1,99 @@ +--- +sort doc with nested object: + - requires: + cluster_features: ["mapper.index_sorting_on_nested"] + reason: uses index sorting on nested fields + - do: + indices.create: + index: test + body: + settings: + index.sort.field: name + mappings: + properties: + name: + type: keyword + nested_field: + type: nested + nested_array: + type: nested + other: + type: object + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "name": "aaaa", "nested_field": {"a": 1, "b": 2}, "nested_array": [{ "a": 10, "b": 20 }, { "a": 100, "b": 200 }], "other": { "value": "A" } }' + - '{ "create": { } }' + - '{ "name": "cccc", "nested_field": {"a": 3, "b": 4}, "nested_array": [{ "a": 30, "b": 40 }, { "a": 300, "b": 400 }], "other": { "value": "C"} }' + - '{ "create": { } }' + - '{ "nested_field": {"a": 7, "b": 8}, "nested_array": [{ "a": 70, "b": 80 }, { "a": 700, "b": 800 }], "other": { "value": "D"} }' + - '{ "create": { } }' + - '{ "name": "bbbb", "nested_field": {"a": 5, "b": 6}, "nested_array": [{ "a": 50, "b": 60 }, { "a": 500, "b": 600 }], "other": { "value": "B"} }' + + - do: + search: + index: test + + - match: { hits.total.value: 4 } + - match: { hits.hits.0._source.name: aaaa } + - match: { hits.hits.0._source.nested_field.a: 1 } + - match: { hits.hits.0._source.nested_field.b: 2 } + - match: { hits.hits.0._source.nested_array.0.a: 10 } + - match: { hits.hits.0._source.nested_array.0.b: 20 } + - match: { hits.hits.0._source.nested_array.1.a: 100 } + - match: { hits.hits.0._source.nested_array.1.b: 200 } + - match: { hits.hits.0._source.other.value: A } + - match: { hits.hits.1._source.name: bbbb } + - match: { hits.hits.1._source.nested_field.a: 5 } + - match: { hits.hits.1._source.nested_field.b: 6 } + - match: { hits.hits.1._source.nested_array.0.a: 50 } + - match: { hits.hits.1._source.nested_array.0.b: 60 } + - match: { hits.hits.1._source.nested_array.1.a: 500 } + - match: { hits.hits.1._source.nested_array.1.b: 600 } + - match: { hits.hits.1._source.other.value: B } + - match: { hits.hits.2._source.name: cccc } + - match: { hits.hits.2._source.nested_field.a: 3 } + - match: { hits.hits.2._source.nested_field.b: 4 } + - match: { hits.hits.2._source.nested_array.0.a: 30 } + - match: { hits.hits.2._source.nested_array.0.b: 40 } + - match: { hits.hits.2._source.nested_array.1.a: 300 } + - match: { hits.hits.2._source.nested_array.1.b: 400 } + - match: { hits.hits.2._source.other.value: C } + - is_false: hits.hits.3._source.name + - match: { hits.hits.3._source.nested_field.a: 7 } + - match: { hits.hits.3._source.nested_field.b: 8 } + - match: { hits.hits.3._source.nested_array.0.a: 70 } + - match: { hits.hits.3._source.nested_array.0.b: 80 } + - match: { hits.hits.3._source.nested_array.1.a: 700 } + - match: { hits.hits.3._source.nested_array.1.b: 800 } + - match: { hits.hits.3._source.other.value: D } + + +--- +sort doc on nested field: + - requires: + cluster_features: [ "mapper.index_sorting_on_nested" ] + reason: uses index sorting on nested fields + - do: + catch: /cannot apply index sort to field 
\[nested_field\.foo\] under nested object \[nested_field\]/ + indices.create: + index: test + body: + settings: + index.sort.field: nested_field.foo + index.sort.mode: min + mappings: + properties: + name: + type: keyword + nested_field: + type: nested + properties: + foo: + type: keyword + bar: + type: keyword diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index 5e8948b7fdea3..4976e5e15adbe 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -392,6 +392,7 @@ override sort mode settings: --- override sort field using nested field type in sorting: - requires: + cluster_features: ["mapper.index_sorting_on_nested"] test_runner_features: [ capabilities ] capabilities: - method: PUT @@ -433,11 +434,12 @@ override sort field using nested field type in sorting: - match: { error.root_cause.0.type: "illegal_argument_exception" } - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "cannot have nested fields when index sort is activated" } + - match: { error.reason: "cannot apply index sort to field [nested] under nested object [nested]" } --- override sort field using nested field type: - requires: + cluster_features: ["mapper.index_sorting_on_nested"] test_runner_features: [ capabilities ] capabilities: - method: PUT @@ -446,7 +448,6 @@ override sort field using nested field type: reason: "Support for 'logs' index mode capability required" - do: - catch: bad_request indices.create: index: test-nested body: @@ -474,10 +475,7 @@ override sort field using nested field type: properties: keywords: type: keyword - - - match: { error.root_cause.0.type: "illegal_argument_exception" } - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "cannot have nested fields when index sort is activated" } + - is_false: error --- routing path not allowed in logs mode: diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 0605e36b2ea4b..3d81cccc92dc7 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -525,7 +525,8 @@ public synchronized IndexShard createShard( this.indexSettings, directory, lock, - new StoreCloseListener(shardId, () -> eventListener.onStoreClosed(shardId)) + new StoreCloseListener(shardId, () -> eventListener.onStoreClosed(shardId)), + this.indexSettings.getIndexSortConfig().hasIndexSort() ); eventListener.onStoreCreated(shardId); indexShard = new IndexShard( diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 0035b54c63f8d..8bebd390d3440 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -111,6 +111,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion UNIQUE_TOKEN_FILTER_POS_FIX = def(8_509_00_0, Version.LUCENE_9_11_0); public static final IndexVersion ADD_SECURITY_MIGRATION = def(8_510_00_0, Version.LUCENE_9_11_0); public static final IndexVersion UPGRADE_TO_LUCENE_9_11_1 = def(8_511_00_0, Version.LUCENE_9_11_1); + public static final IndexVersion 
INDEX_SORTING_ON_NESTED = def(8_512_00_0, Version.LUCENE_9_11_1); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index bc6510555528a..6f4511483126f 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -128,6 +128,7 @@ public abstract class Engine implements Closeable { public static final String CAN_MATCH_SEARCH_SOURCE = "can_match"; protected static final String DOC_STATS_SOURCE = "doc_stats"; public static final long UNKNOWN_PRIMARY_TERM = -1L; + public static final String ROOT_DOC_FIELD_NAME = "__root_doc_for_nested"; protected final ShardId shardId; protected final Logger logger; diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index be64365fedd34..a991c5544a1e1 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -72,6 +72,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.mapper.DocumentParser; @@ -2728,6 +2729,10 @@ private IndexWriterConfig getIndexWriterConfig() { } if (config().getIndexSort() != null) { iwc.setIndexSort(config().getIndexSort()); + if (config().getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.INDEX_SORTING_ON_NESTED)) { + // Needed to support index sorting in the presence of nested objects. + iwc.setParentField(ROOT_DOC_FIELD_NAME); + } } // Provide a custom leaf sorter, so that index readers opened from this writer // will have its leaves sorted according the given leaf sorter. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 401fbe0ea967a..55a48853ee679 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -9,7 +9,9 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -21,6 +23,9 @@ public class DocumentMapper { private final MappingLookup mappingLookup; private final DocumentParser documentParser; private final MapperMetrics mapperMetrics; + private final IndexVersion indexVersion; + + static final NodeFeature INDEX_SORTING_ON_NESTED = new NodeFeature("mapper.index_sorting_on_nested"); /** * Create a new {@link DocumentMapper} that holds empty mappings. 
@@ -54,6 +59,7 @@ public static DocumentMapper createEmpty(MapperService mapperService) { this.mappingLookup = MappingLookup.fromMapping(mapping); this.mappingSource = source; this.mapperMetrics = mapperMetrics; + this.indexVersion = version; assert mapping.toCompressedXContent().equals(source) || isSyntheticSourceMalformed(source, version) : "provided source [" + source + "] differs from mapping [" + mapping.toCompressedXContent() + "]"; @@ -134,7 +140,18 @@ public void validate(IndexSettings settings, boolean checkLimits) { } if (settings.getIndexSortConfig().hasIndexSort() && mappers().nestedLookup() != NestedLookup.EMPTY) { - throw new IllegalArgumentException("cannot have nested fields when index sort is activated"); + if (indexVersion.before(IndexVersions.INDEX_SORTING_ON_NESTED)) { + throw new IllegalArgumentException("cannot have nested fields when index sort is activated"); + } + for (String field : settings.getValue(IndexSortConfig.INDEX_SORT_FIELD_SETTING)) { + for (NestedObjectMapper nestedObjectMapper : mappers().nestedLookup().getNestedMappers().values()) { + if (field.startsWith(nestedObjectMapper.fullPath())) { + throw new IllegalArgumentException( + "cannot apply index sort to field [" + field + "] under nested object [" + nestedObjectMapper.fullPath() + "]" + ); + } + } + } } List routingPaths = settings.getIndexMetadata().getRoutingPaths(); for (String path : routingPaths) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index f7d9b2b4cbd28..a8a81fab654da 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -26,7 +26,8 @@ public Set getFeatures() { RangeFieldMapper.NULL_VALUES_OFF_BY_ONE_FIX, SourceFieldMapper.SYNTHETIC_SOURCE_FALLBACK, DenseVectorFieldMapper.INT4_QUANTIZATION, - DenseVectorFieldMapper.BIT_VECTORS + DenseVectorFieldMapper.BIT_VECTORS, + DocumentMapper.INDEX_SORTING_ON_NESTED ); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index 0acddcf0e45b2..22a7249ec7237 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -36,6 +36,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -197,6 +198,10 @@ static void addIndices( .setIndexCreatedVersionMajor(luceneIndexCreatedVersionMajor); if (indexSort != null) { iwc.setIndexSort(indexSort); + if (indexMetadata != null && indexMetadata.getCreationVersion().onOrAfter(IndexVersions.INDEX_SORTING_ON_NESTED)) { + // Needed to support index sorting in the presence of nested objects. 
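+ // The parent field needs to be configured consistently on every IndexWriter that
+ // opens the index, so the engine, this recovery path, and the temporary writers
+ // in Store below all apply the matching setting.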
+ iwc.setParentField(Engine.ROOT_DOC_FIELD_NAME); + } } try (IndexWriter writer = new IndexWriter(new StatsDirectoryWrapper(hardLinkOrCopyTarget, indexRecoveryStats), iwc)) { diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index b7bf3a68ade07..5a33084e3ea83 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -155,12 +155,20 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref private final OnClose onClose; private final AbstractRefCounted refCounter = AbstractRefCounted.of(this::closeInternal); // close us once we are done + private boolean hasIndexSort; public Store(ShardId shardId, IndexSettings indexSettings, Directory directory, ShardLock shardLock) { - this(shardId, indexSettings, directory, shardLock, OnClose.EMPTY); + this(shardId, indexSettings, directory, shardLock, OnClose.EMPTY, false); } - public Store(ShardId shardId, IndexSettings indexSettings, Directory directory, ShardLock shardLock, OnClose onClose) { + public Store( + ShardId shardId, + IndexSettings indexSettings, + Directory directory, + ShardLock shardLock, + OnClose onClose, + boolean hasIndexSort + ) { super(shardId, indexSettings); this.directory = new StoreDirectory( byteSizeDirectory(directory, indexSettings, logger), @@ -168,6 +176,7 @@ public Store(ShardId shardId, IndexSettings indexSettings, Directory directory, ); this.shardLock = shardLock; this.onClose = onClose; + this.hasIndexSort = hasIndexSort; assert onClose != null; assert shardLock != null; @@ -1541,20 +1550,25 @@ private static Map getUserData(IndexWriter writer) { return userData; } - private static IndexWriter newTemporaryAppendingIndexWriter(final Directory dir, final IndexCommit commit) throws IOException { + private IndexWriter newTemporaryAppendingIndexWriter(final Directory dir, final IndexCommit commit) throws IOException { IndexWriterConfig iwc = newTemporaryIndexWriterConfig().setIndexCommit(commit).setOpenMode(IndexWriterConfig.OpenMode.APPEND); return new IndexWriter(dir, iwc); } - private static IndexWriter newTemporaryEmptyIndexWriter(final Directory dir, final Version luceneVersion) throws IOException { + private IndexWriter newTemporaryEmptyIndexWriter(final Directory dir, final Version luceneVersion) throws IOException { IndexWriterConfig iwc = newTemporaryIndexWriterConfig().setOpenMode(IndexWriterConfig.OpenMode.CREATE) .setIndexCreatedVersionMajor(luceneVersion.major); return new IndexWriter(dir, iwc); } - private static IndexWriterConfig newTemporaryIndexWriterConfig() { + private IndexWriterConfig newTemporaryIndexWriterConfig() { // this config is only used for temporary IndexWriter instances, used to initialize the index or update the commit data, // so we don't want any merges to happen - return indexWriterConfigWithNoMerging(null).setSoftDeletesField(Lucene.SOFT_DELETES_FIELD).setCommitOnClose(false); + var iwc = indexWriterConfigWithNoMerging(null).setSoftDeletesField(Lucene.SOFT_DELETES_FIELD).setCommitOnClose(false); + if (hasIndexSort && indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.INDEX_SORTING_ON_NESTED)) { + // Needed to support index sorting in the presence of nested objects. 
+ iwc.setParentField(Engine.ROOT_DOC_FIELD_NAME); + } + return iwc; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 42700e683e2e3..fbca1484cc7c7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -125,27 +125,52 @@ public void testPartitionedConstraints() throws IOException { } public void testIndexSortWithNestedFields() throws IOException { - Settings settings = Settings.builder().put("index.sort.field", "foo").build(); + IndexVersion oldVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.INDEX_SORTING_ON_NESTED); IllegalArgumentException invalidNestedException = expectThrows( IllegalArgumentException.class, - () -> createMapperService(settings, mapping(b -> { + () -> createMapperService(oldVersion, settings(oldVersion).put("index.sort.field", "foo").build(), () -> true, mapping(b -> { b.startObject("nested_field").field("type", "nested").endObject(); b.startObject("foo").field("type", "keyword").endObject(); })) ); - assertThat(invalidNestedException.getMessage(), containsString("cannot have nested fields when index sort is activated")); + Settings settings = settings(IndexVersions.INDEX_SORTING_ON_NESTED).put("index.sort.field", "foo").build(); + DocumentMapper mapper = createMapperService(settings, mapping(b -> { + b.startObject("nested_field").field("type", "nested").endObject(); + b.startObject("foo").field("type", "keyword").endObject(); + })).documentMapper(); + + List docs = mapper.parse(source(b -> { + b.field("name", "foo"); + b.startObject("nested_field").field("foo", "bar").endObject(); + })).docs(); + assertEquals(2, docs.size()); + assertEquals(docs.get(1), docs.get(0).getParent()); MapperService mapperService = createMapperService( settings, mapping(b -> b.startObject("foo").field("type", "keyword").endObject()) ); - invalidNestedException = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, mapping(b -> { + merge(mapperService, mapping(b -> { b.startObject("nested_field"); b.field("type", "nested"); b.endObject(); + })); + + Settings settings2 = Settings.builder().put("index.sort.field", "foo.bar").build(); + invalidNestedException = expectThrows(IllegalArgumentException.class, () -> createMapperService(settings2, mapping(b -> { + b.startObject("foo"); + { + b.field("type", "nested"); + b.startObject("properties"); + { + b.startObject("bar").field("type", "keyword").endObject(); + } + b.endObject(); + } + b.endObject(); }))); - assertThat(invalidNestedException.getMessage(), containsString("cannot have nested fields when index sort is activated")); + assertEquals("cannot apply index sort to field [foo.bar] under nested object [foo]", invalidNestedException.getMessage()); } public void testFieldAliasWithMismatchedNestedScope() throws Throwable { diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 3eb4675d37e97..5c1a45dba604d 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -744,7 +744,7 @@ public void testOnCloseCallback() throws IOException { assertEquals(shardId, theLock.getShardId()); assertEquals(lock, theLock); count.incrementAndGet(); - }); + }, false); assertEquals(count.get(), 0); 
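// The `false` passed above is the new hasIndexSort constructor flag; this test does
// not configure an index sort.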
final int iters = randomIntBetween(1, 10); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java index de5b5e4d825a2..fd101e53cc90e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java @@ -173,7 +173,7 @@ public void snapshotShard(SnapshotShardContext context) { protected void closeInternal() { // do nothing; } - }, Store.OnClose.EMPTY); + }, Store.OnClose.EMPTY, mapperService.getIndexSettings().getIndexSortConfig().hasIndexSort()); Supplier querySupplier = mapperService.hasNested() ? () -> Queries.newNestedFilter(mapperService.getIndexSettings().getIndexVersionCreated()) : null; From 78c812f845f042b143a050f6dc953effd16bb123 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Mon, 1 Jul 2024 10:07:15 +0200 Subject: [PATCH 068/216] Fix security index settings docs (#110126) Docs tweak with a typo fix and a clarification on how the two available settings interact (essentially https://github.com/elastic/elasticsearch/issues/27871). I'm also open to including this info in the more generic settings API but feels like a simple enough callout to add to the security API. --- .../security/update-settings.asciidoc | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/docs/reference/rest-api/security/update-settings.asciidoc b/docs/reference/rest-api/security/update-settings.asciidoc index 652b722b0af48..b227bb70b31d7 100644 --- a/docs/reference/rest-api/security/update-settings.asciidoc +++ b/docs/reference/rest-api/security/update-settings.asciidoc @@ -18,27 +18,28 @@ Updates the settings of the security internal indices. ==== {api-request-body-title} `security`:: -(Optional, object) Settings to be used for the index used for most security +(Optional, object) Settings to be used for the index used for most security configuration, including Native realm users and roles configured via the API. `security-tokens`:: -(Optional, object) Settings to be used for the index used to store +(Optional, object) Settings to be used for the index used to store <>. -`security`:: -(Optional, object) Settings to be used for the index used to store +`security-profile`:: +(Optional, object) Settings to be used for the index used to store <> information. [[security-api-update-settings-desc]] ==== {api-description-title} -This API allows a user to modify the settings for the Security internal indices -(`.security` and associated indices). Only a subset of settings are allowed to +This API allows a user to modify the settings for the Security internal indices +(`.security` and associated indices). Only a subset of settings are allowed to be modified. This includes: - `index.auto_expand_replicas` - `index.number_of_replicas` +NOTE: If `index.auto_expand_replicas` is set, `index.number_of_replicas` will be ignored during updates. [[security-api-update-settings-example]] ==== {api-examples-title} @@ -62,8 +63,8 @@ PUT /_security/settings ----------------------------------------------------------- // TEST[skip:making sure all the indices have been created reliably is difficult] -The configured settings can be retrieved using the -<> API. 
If a given index -is not in use on the system, but settings are provided for it, the request will -be rejected - this API does not yet support configuring the settings for these +The configured settings can be retrieved using the +<> API. If a given index +is not in use on the system, but settings are provided for it, the request will +be rejected - this API does not yet support configuring the settings for these indices before they are in use. From 43f5696406ad653070bc2a2733878e633a8361e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Mon, 1 Jul 2024 10:12:16 +0200 Subject: [PATCH 069/216] [DOCS] Refactors PUT inference API docs (#109812) --- .../inference/inference-apis.asciidoc | 9 + .../inference/inference-shared.asciidoc | 34 + .../inference/put-inference.asciidoc | 860 +----------------- .../service-azure-ai-studio.asciidoc | 173 ++++ .../inference/service-azure-openai.asciidoc | 156 ++++ .../inference/service-cohere.asciidoc | 204 +++++ .../inference/service-elasticsearch.asciidoc | 122 +++ .../inference/service-elser.asciidoc | 95 ++ .../service-google-ai-studio.asciidoc | 87 ++ .../inference/service-hugging-face.asciidoc | 114 +++ .../inference/service-mistral.asciidoc | 99 ++ .../inference/service-openai.asciidoc | 147 +++ 12 files changed, 1255 insertions(+), 845 deletions(-) create mode 100644 docs/reference/inference/inference-shared.asciidoc create mode 100644 docs/reference/inference/service-azure-ai-studio.asciidoc create mode 100644 docs/reference/inference/service-azure-openai.asciidoc create mode 100644 docs/reference/inference/service-cohere.asciidoc create mode 100644 docs/reference/inference/service-elasticsearch.asciidoc create mode 100644 docs/reference/inference/service-elser.asciidoc create mode 100644 docs/reference/inference/service-google-ai-studio.asciidoc create mode 100644 docs/reference/inference/service-hugging-face.asciidoc create mode 100644 docs/reference/inference/service-mistral.asciidoc create mode 100644 docs/reference/inference/service-openai.asciidoc diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 539bba3f0d61f..f9c41bc0cde81 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -25,3 +25,12 @@ include::delete-inference.asciidoc[] include::get-inference.asciidoc[] include::post-inference.asciidoc[] include::put-inference.asciidoc[] +include::service-azure-ai-studio.asciidoc[] +include::service-azure-openai.asciidoc[] +include::service-cohere.asciidoc[] +include::service-elasticsearch.asciidoc[] +include::service-elser.asciidoc[] +include::service-google-ai-studio.asciidoc[] +include::service-hugging-face.asciidoc[] +include::service-mistral.asciidoc[] +include::service-openai.asciidoc[] diff --git a/docs/reference/inference/inference-shared.asciidoc b/docs/reference/inference/inference-shared.asciidoc new file mode 100644 index 0000000000000..2eafa3434e89e --- /dev/null +++ b/docs/reference/inference/inference-shared.asciidoc @@ -0,0 +1,34 @@ + +tag::api-key-admonition[] +IMPORTANT: You need to provide the API key only once, during the {infer} model creation. +The <> does not retrieve your API key. +After creating the {infer} model, you cannot change the associated API key. +If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. 
+end::api-key-admonition[] + +tag::inference-id[] +The unique identifier of the {infer} endpoint. +end::inference-id[] + +tag::request-per-minute-example[] +[source,text] +---- +"rate_limit": { + "requests_per_minute": <> +} +---- +end::request-per-minute-example[] + + +tag::service-settings[] +Settings used to install the {infer} model. +end::service-settings[] + +tag::task-settings[] +Settings to configure the {infer} task. +These settings are specific to the `` you specified. +end::task-settings[] + +tag::task-type[] +The type of the {infer} task that the model will perform. +end::task-type[] \ No newline at end of file diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 1cbe97c45549e..626721ed146e3 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -6,11 +6,10 @@ experimental[] Creates an {infer} endpoint to perform an {infer} task. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio or Hugging Face. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio or Hugging Face. For built-in models and models uploaded though Eland, the {infer} APIs offer an alternative way to use and manage trained models. -However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the -<>. +However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. + [discrete] [[put-inference-api-request]] @@ -25,851 +24,22 @@ However, if you do not plan to use the {infer} APIs to use these models or if yo * Requires the `manage_inference` <> (the built-in `inference_admin` role grants this privilege) + [discrete] [[put-inference-api-desc]] ==== {api-description-title} -The create {infer} API enables you to create an {infer} endpoint and configure a -{ml} model to perform a specific {infer} task. - -The following services are available through the {infer} API: - -* Azure AI Studio -* Azure OpenAI -* Cohere -* Elasticsearch (for built-in models and models uploaded through Eland) -* ELSER -* Google AI Studio -* Hugging Face -* Mistral -* OpenAI - -[discrete] -[[put-inference-api-path-params]] -==== {api-path-parms-title} - -``:: -(Required, string) -The unique identifier of the {infer} endpoint. - -``:: -(Required, string) -The type of the {infer} task that the model will perform. -Available task types: -* `completion`, -* `rerank`, -* `sparse_embedding`, -* `text_embedding`. - -[discrete] -[[put-inference-api-request-body]] -==== {api-request-body-title} - -`service`:: -(Required, string) -The type of service supported for the specified task type. -Available services: - -* `azureopenai`: specify the `completion` or `text_embedding` task type to use the Azure OpenAI service. -* `azureaistudio`: specify the `completion` or `text_embedding` task type to use the Azure AI Studio service. -* `cohere`: specify the `completion`, `text_embedding` or the `rerank` task type to use the Cohere service. -* `elasticsearch`: specify the `text_embedding` task type to use the E5 built-in model or text embedding models uploaded by Eland. -* `elser`: specify the `sparse_embedding` task type to use the ELSER service. 
-* `googleaistudio`: specify the `completion` or `text_embeddig` task to use the Google AI Studio service. -* `hugging_face`: specify the `text_embedding` task type to use the Hugging Face service. -* `mistral`: specify the `text_embedding` task type to use the Mistral service. -* `openai`: specify the `completion` or `text_embedding` task type to use the OpenAI service. - - -`service_settings`:: -(Required, object) -Settings used to install the {infer} model. -These settings are specific to the -`service` you specified. -+ -.`service_settings` for the `azureaistudio` service -[%collapsible%closed] -===== - -`api_key`::: -(Required, string) -A valid API key of your Azure AI Studio model deployment. -This key can be found on the overview page for your deployment in the management section of your https://ai.azure.com/[Azure AI Studio] account. - -IMPORTANT: You need to provide the API key only once, during the {infer} model creation. -The <> does not retrieve your API key. -After creating the {infer} model, you cannot change the associated API key. -If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. - -`target`::: -(Required, string) -The target URL of your Azure AI Studio model deployment. -This can be found on the overview page for your deployment in the management section of your https://ai.azure.com/[Azure AI Studio] account. - -`provider`::: -(Required, string) -The model provider for your deployment. -Note that some providers may support only certain task types. -Supported providers include: - -* `cohere` - available for `text_embedding` and `completion` task types -* `databricks` - available for `completion` task type only -* `meta` - available for `completion` task type only -* `microsoft_phi` - available for `completion` task type only -* `mistral` - available for `completion` task type only -* `openai` - available for `text_embedding` and `completion` task types - -`endpoint_type`::: -(Required, string) -One of `token` or `realtime`. -Specifies the type of endpoint that is used in your model deployment. -There are https://learn.microsoft.com/en-us/azure/ai-studio/concepts/deployments-overview#billing-for-deploying-and-inferencing-llms-in-azure-ai-studio[two endpoint types available] for deployment through Azure AI Studio. -"Pay as you go" endpoints are billed per token. -For these, you must specify `token` for your `endpoint_type`. -For "real-time" endpoints which are billed per hour of usage, specify `realtime`. - -`rate_limit`::: -(Optional, object) -By default, the `azureaistudio` service sets the number of requests allowed per minute to `240`. -This helps to minimize the number of rate limit errors returned from Azure AI Studio. -To modify this, set the `requests_per_minute` setting of this object in your service settings: -+ -[source,text] ----- -"rate_limit": { - "requests_per_minute": <> -} ----- -===== -+ -.`service_settings` for the `azureopenai` service -[%collapsible%closed] -===== - -`api_key` or `entra_id`::: -(Required, string) -You must provide _either_ an API key or an Entra ID. -If you do not provide either, or provide both, you will receive an error when trying to create your model. -See the https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#authentication[Azure OpenAI Authentication documentation] for more details on these authentication types. - -IMPORTANT: You need to provide the API key or Entra ID only once, during the {infer} model creation. 
-The <> does not retrieve your authentication credentials. -After creating the {infer} model, you cannot change the associated API key or Entra ID. -If you want to use a different API key or Entra ID, delete the {infer} model and recreate it with the same name and the updated API key. -You _must_ have either an `api_key` or an `entra_id` defined. -If neither are present, an error will occur. - -`resource_name`::: -(Required, string) -The name of your Azure OpenAI resource. -You can find this from the https://portal.azure.com/#view/HubsExtension/BrowseAll[list of resources] in the Azure Portal for your subscription. - -`deployment_id`::: -(Required, string) -The deployment name of your deployed models. -Your Azure OpenAI deployments can be found though the https://oai.azure.com/[Azure OpenAI Studio] portal that is linked to your subscription. - -`api_version`::: -(Required, string) -The Azure API version ID to use. -We recommend using the https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#embeddings[latest supported non-preview version]. - -`rate_limit`::: -(Optional, object) -The `azureopenai` service sets a default number of requests allowed per minute depending on the task type. -For `text_embedding` it is set to `1440`. -For `completion` it is set to `120`. -This helps to minimize the number of rate limit errors returned from Azure. -To modify this, set the `requests_per_minute` setting of this object in your service settings: -+ -[source,text] ----- -"rate_limit": { - "requests_per_minute": <> -} ----- -+ -More information about the rate limits for Azure can be found in the https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits[Quota limits docs] and https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest[How to change the quotas]. -===== -+ -.`service_settings` for the `cohere` service -[%collapsible%closed] -===== -`api_key`::: -(Required, string) -A valid API key of your Cohere account. -You can find your Cohere API keys or you can create a new one -https://dashboard.cohere.com/api-keys[on the API keys settings page]. - -IMPORTANT: You need to provide the API key only once, during the {infer} model creation. -The <> does not retrieve your API key. -After creating the {infer} model, you cannot change the associated API key. -If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. - -`embedding_type`:: -(Optional, string) -Only for `text_embedding`. -Specifies the types of embeddings you want to get back. -Defaults to `float`. -Valid values are: -* `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). -* `float`: use it for the default float embeddings. -* `int8`: use it for signed int8 embeddings. - -`model_id`:: -(Optional, string) -The name of the model to use for the {infer} task. -To review the available `rerank` models, refer to the -https://docs.cohere.com/reference/rerank-1[Cohere docs]. - -To review the available `text_embedding` models, refer to the -https://docs.cohere.com/reference/embed[Cohere docs]. -The default value for -`text_embedding` is `embed-english-v2.0`. - -`rate_limit`::: -(Optional, object) -By default, the `cohere` service sets the number of requests allowed per minute to `10000`. -This value is the same for all task types. -This helps to minimize the number of rate limit errors returned from Cohere. 
-To modify this, set the `requests_per_minute` setting of this object in your service settings: -+ -[source,text] ----- -"rate_limit": { - "requests_per_minute": <> -} ----- -+ -More information about Cohere's rate limits can be found in https://docs.cohere.com/docs/going-live#production-key-specifications[Cohere's production key docs]. - -===== -+ -.`service_settings` for the `elasticsearch` service -[%collapsible%closed] -===== - -`model_id`::: -(Required, string) -The name of the model to use for the {infer} task. -It can be the ID of either a built-in model (for example, `.multilingual-e5-small` for E5) or a text embedding model already -{ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland]. - -`num_allocations`::: -(Required, integer) -The total number of allocations this model is assigned across machine learning nodes. Increasing this value generally increases the throughput. - -`num_threads`::: -(Required, integer) -Sets the number of threads used by each model allocation during inference. This generally increases the speed per inference request. The inference process is a compute-bound process; `threads_per_allocations` must not exceed the number of available allocated processors per node. -Must be a power of 2. Max allowed value is 32. - -===== -+ -.`service_settings` for the `elser` service -[%collapsible%closed] -===== - -`num_allocations`::: -(Required, integer) -The total number of allocations this model is assigned across machine learning nodes. Increasing this value generally increases the throughput. - -`num_threads`::: -(Required, integer) -Sets the number of threads used by each model allocation during inference. This generally increases the speed per inference request. The inference process is a compute-bound process; `threads_per_allocations` must not exceed the number of available allocated processors per node. -Must be a power of 2. Max allowed value is 32. - -===== -+ -.`service_settings` for the `googleiastudio` service -[%collapsible%closed] -===== - -`api_key`::: -(Required, string) -A valid API key for the Google Gemini API. - -`model_id`::: -(Required, string) -The name of the model to use for the {infer} task. -You can find the supported models at https://ai.google.dev/gemini-api/docs/models/gemini[Gemini API models]. - -`rate_limit`::: -(Optional, object) -By default, the `googleaistudio` service sets the number of requests allowed per minute to `360`. -This helps to minimize the number of rate limit errors returned from Google AI Studio. -To modify this, set the `requests_per_minute` setting of this object in your service settings: -+ --- -[source,text] ----- -"rate_limit": { - "requests_per_minute": <> -} ----- --- - -===== -+ -.`service_settings` for the `hugging_face` service -[%collapsible%closed] -===== - -`api_key`::: -(Required, string) -A valid access token of your Hugging Face account. -You can find your Hugging Face access tokens or you can create a new one -https://huggingface.co/settings/tokens[on the settings page]. - -IMPORTANT: You need to provide the API key only once, during the {infer} model creation. -The <> does not retrieve your API key. -After creating the {infer} model, you cannot change the associated API key. -If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. - -`url`::: -(Required, string) -The URL endpoint to use for the requests. 
- -`rate_limit`::: -(Optional, object) -By default, the `huggingface` service sets the number of requests allowed per minute to `3000`. -This helps to minimize the number of rate limit errors returned from Hugging Face. -To modify this, set the `requests_per_minute` setting of this object in your service settings: -+ -[source,text] ----- -"rate_limit": { - "requests_per_minute": <> -} ----- - -===== -+ -.`service_settings` for the `mistral` service -[%collapsible%closed] -===== - -`api_key`::: -(Required, string) -A valid API key for your Mistral account. -You can find your Mistral API keys or you can create a new one -https://console.mistral.ai/api-keys/[on the API Keys page]. - -`model`::: -(Required, string) -The name of the model to use for the {infer} task. -Refer to the https://docs.mistral.ai/getting-started/models/[Mistral models documentation] -for the list of available text embedding models. - -`max_input_tokens`::: -(Optional, integer) -Allows you to specify the maximum number of tokens per input before chunking occurs. - -`rate_limit`::: -(Optional, object) -By default, the `mistral` service sets the number of requests allowed per minute to `240`. -This helps to minimize the number of rate limit errors returned from the Mistral API. -To modify this, set the `requests_per_minute` setting of this object in your service settings: -+ -[source,text] ----- -"rate_limit": { - "requests_per_minute": <> -} ----- - -===== -+ -.`service_settings` for the `openai` service -[%collapsible%closed] -===== - -`api_key`::: -(Required, string) -A valid API key of your OpenAI account. -You can find your OpenAI API keys in your OpenAI account under the -https://platform.openai.com/api-keys[API keys section]. - -IMPORTANT: You need to provide the API key only once, during the {infer} model creation. -The <> does not retrieve your API key. -After creating the {infer} model, you cannot change the associated API key. -If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. - -`model_id`::: -(Required, string) -The name of the model to use for the {infer} task. -Refer to the -https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] -for the list of available text embedding models. - -`organization_id`::: -(Optional, string) -The unique identifier of your organization. -You can find the Organization ID in your OpenAI account under -https://platform.openai.com/account/organization[**Settings** > **Organizations**]. - -`url`::: -(Optional, string) -The URL endpoint to use for the requests. -Can be changed for testing purposes. -Defaults to `https://api.openai.com/v1/embeddings`. - -`rate_limit`::: -(Optional, object) -The `openai` service sets a default number of requests allowed per minute depending on the task type. -For `text_embedding` it is set to `3000`. -For `completion` it is set to `500`. -This helps to minimize the number of rate limit errors returned from Azure. -To modify this, set the `requests_per_minute` setting of this object in your service settings: -+ -[source,text] ----- -"rate_limit": { - "requests_per_minute": <> -} ----- -+ -More information about the rate limits for OpenAI can be found in your https://platform.openai.com/account/limits[Account limits]. - -===== - -`task_settings`:: -(Optional, object) -Settings to configure the {infer} task. -These settings are specific to the -`` you specified. 
-+ -.`task_settings` for the `completion` task type -[%collapsible%closed] -===== - -`do_sample`::: -(Optional, float) -For the `azureaistudio` service only. -Instructs the inference process to perform sampling or not. -Has not affect unless `temperature` or `top_p` is specified. - -`max_new_tokens`::: -(Optional, integer) -For the `azureaistudio` service only. -Provides a hint for the maximum number of output tokens to be generated. -Defaults to 64. - -`user`::: -(Optional, string) -For `openai` service only. -Specifies the user issuing the request, which can be used for abuse detection. - -`temperature`::: -(Optional, float) -For the `azureaistudio` service only. -A number in the range of 0.0 to 2.0 that specifies the sampling temperature to use that controls the apparent creativity of generated completions. -Should not be used if `top_p` is specified. - -`top_p`::: -(Optional, float) -For the `azureaistudio` service only. -A number in the range of 0.0 to 2.0 that is an alternative value to temperature that causes the model to consider the results of the tokens with nucleus sampling probability. -Should not be used if `temperature` is specified. - -===== -+ -.`task_settings` for the `rerank` task type -[%collapsible%closed] -===== - -`return_documents`:: -(Optional, boolean) -For `cohere` service only. -Specify whether to return doc text within the results. - -`top_n`:: -(Optional, integer) -The number of most relevant documents to return, defaults to the number of the documents. - -===== -+ -.`task_settings` for the `text_embedding` task type -[%collapsible%closed] -===== - -`input_type`::: -(Optional, string) -For `cohere` service only. -Specifies the type of input passed to the model. -Valid values are: -* `classification`: use it for embeddings passed through a text classifier. -* `clusterning`: use it for the embeddings run through a clustering algorithm. -* `ingest`: use it for storing document embeddings in a vector database. -* `search`: use it for storing embeddings of search queries run against a vector database to find relevant documents. -+ -IMPORTANT: The `input_type` field is required when using embedding models `v3` and higher. - -`truncate`::: -(Optional, string) -For `cohere` service only. -Specifies how the API handles inputs longer than the maximum token length. -Defaults to `END`. -Valid values are: -* `NONE`: when the input exceeds the maximum input token length an error is returned. -* `START`: when the input exceeds the maximum input token length the start of the input is discarded. -* `END`: when the input exceeds the maximum input token length the end of the input is discarded. - -`user`::: -(optional, string) -For `openai`, `azureopenai` and `azureaistudio` services only. -Specifies the user issuing the request, which can be used for abuse detection. - -===== -[discrete] -[[put-inference-api-example]] -==== {api-examples-title} - -This section contains example API calls for every service type. - -[discrete] -[[inference-example-azureaistudio]] -===== Azure AI Studio service - -The following example shows how to create an {infer} endpoint called -`azure_ai_studio_embeddings` to perform a `text_embedding` task type. -Note that we do not specify a model here, as it is defined already via our Azure AI Studio deployment. - -The list of embeddings models that you can choose from in your deployment can be found in the https://ai.azure.com/explore/models?selectedTask=embeddings[Azure AI Studio model explorer]. 
- -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/azure_ai_studio_embeddings -{ - "service": "azureaistudio", - "service_settings": { - "api_key": "", - "target": "", - "provider": "", - "endpoint_type": "" - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - -The next example shows how to create an {infer} endpoint called -`azure_ai_studio_completion` to perform a `completion` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/completion/azure_ai_studio_completion -{ - "service": "azureaistudio", - "service_settings": { - "api_key": "", - "target": "", - "provider": "", - "endpoint_type": "" - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - -The list of chat completion models that you can choose from in your deployment can be found in the https://ai.azure.com/explore/models?selectedTask=chat-completion[Azure AI Studio model explorer]. - -[discrete] -[[inference-example-azureopenai]] -===== Azure OpenAI service - -The following example shows how to create an {infer} endpoint called -`azure_openai_embeddings` to perform a `text_embedding` task type. -Note that we do not specify a model here, as it is defined already via our Azure OpenAI deployment. - -The list of embeddings models that you can choose from in your deployment can be found in the https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#embeddings[Azure models documentation]. - -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/azure_openai_embeddings -{ - "service": "azureopenai", - "service_settings": { - "api_key": "", - "resource_name": "", - "deployment_id": "", - "api_version": "2024-02-01" - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - -The next example shows how to create an {infer} endpoint called -`azure_openai_completion` to perform a `completion` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/completion/azure_openai_completion -{ - "service": "azureopenai", - "service_settings": { - "api_key": "", - "resource_name": "", - "deployment_id": "", - "api_version": "2024-02-01" - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - -The list of chat completion models that you can choose from in your Azure OpenAI deployment can be found at the following places: - -* https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-4-and-gpt-4-turbo-models[GPT-4 and GPT-4 Turbo models] -* https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-35[GPT-3.5] - -[discrete] -[[inference-example-cohere]] -===== Cohere service - -The following example shows how to create an {infer} endpoint called -`cohere-embeddings` to perform a `text_embedding` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/cohere-embeddings -{ - "service": "cohere", - "service_settings": { - "api_key": "", - "model_id": "embed-english-light-v3.0", - "embedding_type": "byte" - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - - -The following example shows how to create an {infer} endpoint called -`cohere-rerank` to perform a `rerank` task type. 
- -[source,console] ------------------------------------------------------------- -PUT _inference/rerank/cohere-rerank -{ - "service": "cohere", - "service_settings": { - "api_key": "", - "model_id": "rerank-english-v3.0" - }, - "task_settings": { - "top_n": 10, - "return_documents": true - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - -For more examples, also review the -https://docs.cohere.com/docs/elasticsearch-and-cohere#rerank-search-results-with-cohere-and-elasticsearch[Cohere documentation]. - -[discrete] -[[inference-example-e5]] -===== E5 via the `elasticsearch` service - -The following example shows how to create an {infer} endpoint called -`my-e5-model` to perform a `text_embedding` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/my-e5-model -{ - "service": "elasticsearch", - "service_settings": { - "num_allocations": 1, - "num_threads": 1, - "model_id": ".multilingual-e5-small" <1> - } -} ------------------------------------------------------------- -// TEST[skip:TBD] -<1> The `model_id` must be the ID of one of the built-in E5 models. -Valid values are `.multilingual-e5-small` and `.multilingual-e5-small_linux-x86_64`. -For further details, refer to the {ml-docs}/ml-nlp-e5.html[E5 model documentation]. - -[discrete] -[[inference-example-elser]] -===== ELSER service - -The following example shows how to create an {infer} endpoint called -`my-elser-model` to perform a `sparse_embedding` task type. -Refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation] for more info. - -[source,console] ------------------------------------------------------------- -PUT _inference/sparse_embedding/my-elser-model -{ - "service": "elser", - "service_settings": { - "num_allocations": 1, - "num_threads": 1 - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - - -Example response: - -[source,console-result] ------------------------------------------------------------- -{ - "inference_id": "my-elser-model", - "task_type": "sparse_embedding", - "service": "elser", - "service_settings": { - "num_allocations": 1, - "num_threads": 1 - }, - "task_settings": {} -} ------------------------------------------------------------- -// NOTCONSOLE - - -[discrete] -[[inference-example-googleaistudio]] -===== Google AI Studio service - -The following example shows how to create an {infer} endpoint called -`google_ai_studio_completion` to perform a `completion` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/completion/google_ai_studio_completion -{ - "service": "googleaistudio", - "service_settings": { - "api_key": "", - "model_id": "" - } -} ------------------------------------------------------------- -// TEST[skip:TBD] - - -[discrete] -[[inference-example-hugging-face]] -===== Hugging Face service - -The following example shows how to create an {infer} endpoint called -`hugging-face-embeddings` to perform a `text_embedding` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/hugging-face-embeddings -{ - "service": "hugging_face", - "service_settings": { - "api_key": "", <1> - "url": "" <2> - } -} ------------------------------------------------------------- -// TEST[skip:TBD] -<1> A valid Hugging Face access token. -You can find on the -https://huggingface.co/settings/tokens[settings page of your account]. 
-<2> The {infer} endpoint URL you created on Hugging Face. - -Create a new {infer} endpoint on -https://ui.endpoints.huggingface.co/[the Hugging Face endpoint page] to get an endpoint URL. -Select the model you want to use on the new endpoint creation page - for example `intfloat/e5-small-v2` - then select the `Sentence Embeddings` -task under the Advanced configuration section. -Create the endpoint. -Copy the URL after the endpoint initialization has been finished. - -[discrete] -[[inference-example-hugging-face-supported-models]] -The list of recommended models for the Hugging Face service: - -* https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2[all-MiniLM-L6-v2] -* https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2[all-MiniLM-L12-v2] -* https://huggingface.co/sentence-transformers/all-mpnet-base-v2[all-mpnet-base-v2] -* https://huggingface.co/intfloat/e5-base-v2[e5-base-v2] -* https://huggingface.co/intfloat/e5-small-v2[e5-small-v2] -* https://huggingface.co/intfloat/multilingual-e5-base[multilingual-e5-base] -* https://huggingface.co/intfloat/multilingual-e5-small[multilingual-e5-small] - -[discrete] -[[inference-example-eland]] -===== Models uploaded by Eland via the elasticsearch service - -The following example shows how to create an {infer} endpoint called -`my-msmarco-minilm-model` to perform a `text_embedding` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/my-msmarco-minilm-model -{ - "service": "elasticsearch", - "service_settings": { - "num_allocations": 1, - "num_threads": 1, - "model_id": "msmarco-MiniLM-L12-cos-v5" <1> - } -} ------------------------------------------------------------- -// TEST[skip:TBD] -<1> The `model_id` must be the ID of a text embedding model which has already been -{ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland]. - -[discrete] -[[inference-example-mistral]] -===== Mistral Service - -The following example shows how to create an {infer} endpoint called -`mistral-embeddings-test` to perform a `text_embedding` task type. - -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/mistral-embeddings-test -{ - "service": "mistral", - "service_settings": { - "api_key": "", - "model": "mistral-embed" <1> - } -} ------------------------------------------------------------- -// TEST[skip:TBD] -<1> The `model` must be the ID of a text embedding model which can be found in the -https://docs.mistral.ai/getting-started/models/[Mistral models documentation] - -[discrete] -[[inference-example-openai]] -===== OpenAI service - -The following example shows how to create an {infer} endpoint called -`openai-embeddings` to perform a `text_embedding` task type. -[source,console] ------------------------------------------------------------- -PUT _inference/text_embedding/openai-embeddings -{ - "service": "openai", - "service_settings": { - "api_key": "", - "model_id": "text-embedding-ada-002" - } -} ------------------------------------------------------------- -// TEST[skip:TBD] +The create {infer} API enables you to create an {infer} endpoint and configure a {ml} model to perform a specific {infer} task. -The next example shows how to create an {infer} endpoint called -`openai-completion` to perform a `completion` task type. 
+The following services are available through the {infer} API. Click the links to review the configuration details of each service:
-[source,console]
-------------------------------------------------------------
-PUT _inference/completion/openai-completion
-{
-    "service": "openai",
-    "service_settings": {
-        "api_key": "<api_key>",
-        "model_id": "gpt-3.5-turbo"
-    }
-}
-------------------------------------------------------------
-// TEST[skip:TBD]
+* <<infer-service-azure-ai-studio,Azure AI Studio>>
+* <<infer-service-azure-openai,Azure OpenAI>>
+* <<infer-service-cohere,Cohere>>
+* <<infer-service-elasticsearch,Elasticsearch>> (for built-in models and models uploaded through Eland)
+* <<infer-service-elser,ELSER>>
+* <<infer-service-google-ai-studio,Google AI Studio>>
+* <<infer-service-hugging-face,Hugging Face>>
+* <<infer-service-mistral,Mistral>>
+* <<infer-service-openai,OpenAI>>
diff --git a/docs/reference/inference/service-azure-ai-studio.asciidoc b/docs/reference/inference/service-azure-ai-studio.asciidoc
new file mode 100644
index 0000000000000..b8f84b47d68a3
--- /dev/null
+++ b/docs/reference/inference/service-azure-ai-studio.asciidoc
@@ -0,0 +1,173 @@
+[[infer-service-azure-ai-studio]]
+=== Azure AI Studio {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `azureaistudio` service.
+
+
+[discrete]
+[[infer-service-azure-ai-studio-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-azure-ai-studio-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `completion`,
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-azure-ai-studio-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`azureaistudio`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `azureaistudio` service.
+--
+
+`api_key`:::
+(Required, string)
+A valid API key of your Azure AI Studio model deployment.
+This key can be found on the overview page for your deployment in the management section of your https://ai.azure.com/[Azure AI Studio] account.
++
+--
+include::inference-shared.asciidoc[tag=api-key-admonition]
+--
+
+`target`:::
+(Required, string)
+The target URL of your Azure AI Studio model deployment.
+This can be found on the overview page for your deployment in the management section of your https://ai.azure.com/[Azure AI Studio] account.
+
+`provider`:::
+(Required, string)
+The model provider for your deployment.
+Note that some providers may support only certain task types.
+Supported providers include:
+
+* `cohere` - available for `text_embedding` and `completion` task types
+* `databricks` - available for `completion` task type only
+* `meta` - available for `completion` task type only
+* `microsoft_phi` - available for `completion` task type only
+* `mistral` - available for `completion` task type only
+* `openai` - available for `text_embedding` and `completion` task types
+
+`endpoint_type`:::
+(Required, string)
+One of `token` or `realtime`.
+Specifies the type of endpoint that is used in your model deployment.
+There are https://learn.microsoft.com/en-us/azure/ai-studio/concepts/deployments-overview#billing-for-deploying-and-inferencing-llms-in-azure-ai-studio[two endpoint types available] for deployment through Azure AI Studio.
+"Pay as you go" endpoints are billed per token.
+For these, you must specify `token` for your `endpoint_type`.
+For "real-time" endpoints, which are billed per hour of usage, specify `realtime`.
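++
+--
+For example, a "pay as you go" deployment might be declared like this (an illustrative sketch: the endpoint name is hypothetical, the key and target are placeholders, and the provider must match your actual deployment):
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/completion/azure_ai_studio_completion_example
+{
+    "service": "azureaistudio",
+    "service_settings": {
+        "api_key": "<api_key>", <1>
+        "target": "<target_uri>",
+        "provider": "openai", <2>
+        "endpoint_type": "token" <3>
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> A placeholder for the key of your deployment.
+<2> Assumes an OpenAI chat model deployment; check the provider list above for task type support.
+<3> `token`, because this sketch assumes a pay-as-you-go deployment billed per token.
+--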
+
+`rate_limit`:::
+(Optional, object)
+By default, the `azureaistudio` service sets the number of requests allowed per minute to `240`.
+This helps to minimize the number of rate limit errors returned from Azure AI Studio.
+To modify this, set the `requests_per_minute` setting of this object in your service settings:
++
+--
+include::inference-shared.asciidoc[tag=request-per-minute-example]
+--
+
+`task_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=task-settings]
++
+.`task_settings` for the `completion` task type
+[%collapsible%closed]
+=====
+`do_sample`:::
+(Optional, float)
+Instructs the inference process to perform sampling or not.
+Has no effect unless `temperature` or `top_p` is specified.
+
+`max_new_tokens`:::
+(Optional, integer)
+Provides a hint for the maximum number of output tokens to be generated.
+Defaults to 64.
+
+`temperature`:::
+(Optional, float)
+A number in the range of 0.0 to 2.0 that specifies the sampling temperature to use, which controls the apparent creativity of generated completions.
+Should not be used if `top_p` is specified.
+
+`top_p`:::
+(Optional, float)
+A number in the range of 0.0 to 2.0 that is an alternative to `temperature`; it causes the model to consider the results of the tokens with nucleus sampling probability.
+Should not be used if `temperature` is specified.
+=====
++
+.`task_settings` for the `text_embedding` task type
+[%collapsible%closed]
=====
+`user`:::
+(Optional, string)
+Specifies the user issuing the request, which can be used for abuse detection.
+=====
+
+
+[discrete]
+[[inference-example-azureaistudio]]
+==== Azure AI Studio service example
+
+The following example shows how to create an {infer} endpoint called `azure_ai_studio_embeddings` to perform a `text_embedding` task type.
+Note that we do not specify a model here, as it is defined already via our Azure AI Studio deployment.
+
+The list of embeddings models that you can choose from in your deployment can be found in the https://ai.azure.com/explore/models?selectedTask=embeddings[Azure AI Studio model explorer].
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/azure_ai_studio_embeddings
+{
+    "service": "azureaistudio",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "target": "<target_uri>",
+        "provider": "<model_provider>",
+        "endpoint_type": "<endpoint_type>"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The next example shows how to create an {infer} endpoint called `azure_ai_studio_completion` to perform a `completion` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/completion/azure_ai_studio_completion
+{
+    "service": "azureaistudio",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "target": "<target_uri>",
+        "provider": "<model_provider>",
+        "endpoint_type": "<endpoint_type>"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The list of chat completion models that you can choose from in your deployment can be found in the https://ai.azure.com/explore/models?selectedTask=chat-completion[Azure AI Studio model explorer].
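+
+After an endpoint is created, it can be exercised with a perform {infer} request.
+The following sketch assumes the `azure_ai_studio_completion` endpoint created above; the input text is arbitrary:
+
+[source,console]
+------------------------------------------------------------
+POST _inference/completion/azure_ai_studio_completion
+{
+    "input": "What is Elastic?"
+}
+------------------------------------------------------------
+// TEST[skip:TBD]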
\ No newline at end of file
diff --git a/docs/reference/inference/service-azure-openai.asciidoc b/docs/reference/inference/service-azure-openai.asciidoc
new file mode 100644
index 0000000000000..6f03c5966d9e6
--- /dev/null
+++ b/docs/reference/inference/service-azure-openai.asciidoc
@@ -0,0 +1,156 @@
+[[infer-service-azure-openai]]
+=== Azure OpenAI {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `azureopenai` service.
+
+
+[discrete]
+[[infer-service-azure-openai-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-azure-openai-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `completion`,
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-azure-openai-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`azureopenai`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `azureopenai` service.
+--
+
+`api_key` or `entra_id`:::
+(Required, string)
+You must provide _either_ an API key or an Entra ID.
+If you do not provide either, or provide both, you will receive an error when trying to create your model.
+See the https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#authentication[Azure OpenAI Authentication documentation] for more details on these authentication types.
++
+--
+include::inference-shared.asciidoc[tag=api-key-admonition]
+--
+
+`resource_name`:::
+(Required, string)
+The name of your Azure OpenAI resource.
+You can find this from the https://portal.azure.com/#view/HubsExtension/BrowseAll[list of resources] in the Azure Portal for your subscription.
+
+`deployment_id`:::
+(Required, string)
+The deployment name of your deployed models.
+Your Azure OpenAI deployments can be found through the https://oai.azure.com/[Azure OpenAI Studio] portal that is linked to your subscription.
+
+`api_version`:::
+(Required, string)
+The Azure API version ID to use.
+We recommend using the https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#embeddings[latest supported non-preview version].
+
+`rate_limit`:::
+(Optional, object)
+The `azureopenai` service sets a default number of requests allowed per minute depending on the task type.
+For `text_embedding` it is set to `1440`.
+For `completion` it is set to `120`.
+This helps to minimize the number of rate limit errors returned from Azure.
+To modify this, set the `requests_per_minute` setting of this object in your service settings:
++
+--
+include::inference-shared.asciidoc[tag=request-per-minute-example]
+
+More information about the rate limits for Azure can be found in the https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits[Quota limits docs] and https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest[How to change the quotas].
+--
+
+`task_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=task-settings]
++
+.`task_settings` for the `completion` task type
+[%collapsible%closed]
+=====
+`user`:::
+(Optional, string)
+Specifies the user issuing the request, which can be used for abuse detection.
+=====
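++
+--
+For example, the `user` hint can be supplied when the endpoint is created (a sketch; the endpoint name and the identifier value are illustrative, the credentials are placeholders):
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/completion/azure_openai_completion_with_user
+{
+    "service": "azureopenai",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "resource_name": "<resource_name>",
+        "deployment_id": "<deployment_id>",
+        "api_version": "2024-02-01"
+    },
+    "task_settings": {
+        "user": "user-1234" <1>
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> An illustrative identifier; any stable string that identifies the calling user works.
+--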
++
+.`task_settings` for the `text_embedding` task type
+[%collapsible%closed]
+=====
+`user`:::
+(Optional, string)
+Specifies the user issuing the request, which can be used for abuse detection.
+=====
+
+
+
+[discrete]
+[[inference-example-azure-openai]]
+==== Azure OpenAI service example
+
+The following example shows how to create an {infer} endpoint called
+`azure_openai_embeddings` to perform a `text_embedding` task type.
+Note that we do not specify a model here, as it is defined already via our Azure OpenAI deployment.
+
+The list of embeddings models that you can choose from in your deployment can be found in the https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#embeddings[Azure models documentation].
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/azure_openai_embeddings
+{
+    "service": "azureopenai",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "resource_name": "<resource_name>",
+        "deployment_id": "<deployment_id>",
+        "api_version": "2024-02-01"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The next example shows how to create an {infer} endpoint called
+`azure_openai_completion` to perform a `completion` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/completion/azure_openai_completion
+{
+    "service": "azureopenai",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "resource_name": "<resource_name>",
+        "deployment_id": "<deployment_id>",
+        "api_version": "2024-02-01"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The list of chat completion models that you can choose from in your Azure OpenAI deployment can be found at the following places:
+
+* https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-4-and-gpt-4-turbo-models[GPT-4 and GPT-4 Turbo models]
+* https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-35[GPT-3.5]
\ No newline at end of file
diff --git a/docs/reference/inference/service-cohere.asciidoc b/docs/reference/inference/service-cohere.asciidoc
new file mode 100644
index 0000000000000..52d71e0bc02a5
--- /dev/null
+++ b/docs/reference/inference/service-cohere.asciidoc
@@ -0,0 +1,204 @@
+[[infer-service-cohere]]
+=== Cohere {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `cohere` service.
+
+
+[discrete]
+[[infer-service-cohere-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-cohere-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `completion`,
+* `rerank`,
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-cohere-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`cohere`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `cohere` service.
+--
+
+`api_key`:::
+(Required, string)
+A valid API key of your Cohere account.
+You can find your Cohere API keys or you can create a new one
+https://dashboard.cohere.com/api-keys[on the API keys settings page].
++
+--
+include::inference-shared.asciidoc[tag=api-key-admonition]
+--
+
+`rate_limit`:::
+(Optional, object)
+By default, the `cohere` service sets the number of requests allowed per minute to `10000`.
+This value is the same for all task types.
+This helps to minimize the number of rate limit errors returned from Cohere.
+To modify this, set the `requests_per_minute` setting of this object in your service settings:
++
+--
+include::inference-shared.asciidoc[tag=request-per-minute-example]
+
+More information about Cohere's rate limits can be found in https://docs.cohere.com/docs/going-live#production-key-specifications[Cohere's production key docs].
+--
++
+.`service_settings` for the `completion` task type
+[%collapsible%closed]
+=====
+`model_id`::
+(Optional, string)
+The name of the model to use for the {infer} task.
+To review the available `completion` models, refer to the
+https://docs.cohere.com/docs/models#command[Cohere docs].
+=====
++
+.`service_settings` for the `rerank` task type
+[%collapsible%closed]
+=====
+`model_id`::
+(Optional, string)
+The name of the model to use for the {infer} task.
+To review the available `rerank` models, refer to the
+https://docs.cohere.com/reference/rerank-1[Cohere docs].
+=====
++
+.`service_settings` for the `text_embedding` task type
+[%collapsible%closed]
+=====
+`embedding_type`:::
+(Optional, string)
+Specifies the types of embeddings you want to get back.
+Defaults to `float`.
+Valid values are:
+* `byte`: use it for signed int8 embeddings (this is a synonym of `int8`).
+* `float`: use it for the default float embeddings.
+* `int8`: use it for signed int8 embeddings.
+
+`model_id`:::
+(Optional, string)
+The name of the model to use for the {infer} task.
+To review the available `text_embedding` models, refer to the
+https://docs.cohere.com/reference/embed[Cohere docs].
+The default value for `text_embedding` is `embed-english-v2.0`.
+
+`similarity`:::
+(Optional, string)
+Similarity measure. One of `cosine`, `dot_product`, `l2_norm`.
+Defaults based on the `embedding_type` (`float` -> `dot_product`, `int8/byte` -> `cosine`).
+=====
+
+
+
+`task_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=task-settings]
++
+.`task_settings` for the `rerank` task type
+[%collapsible%closed]
+=====
+`return_documents`::
+(Optional, boolean)
+Specify whether to return doc text within the results.
+
+`top_n`::
+(Optional, integer)
+The number of most relevant documents to return, defaults to the number of the documents.
+=====
++
+.`task_settings` for the `text_embedding` task type
+[%collapsible%closed]
+=====
+`input_type`:::
+(Optional, string)
+Specifies the type of input passed to the model.
+Valid values are:
+* `classification`: use it for embeddings passed through a text classifier.
+* `clustering`: use it for the embeddings run through a clustering algorithm.
+* `ingest`: use it for storing document embeddings in a vector database.
+* `search`: use it for storing embeddings of search queries run against a vector database to find relevant documents.
++
+IMPORTANT: The `input_type` field is required when using embedding models `v3` and higher.
+
+`truncate`:::
+(Optional, string)
+Specifies how the API handles inputs longer than the maximum token length.
+Defaults to `END`.
+Valid values are:
+* `NONE`: when the input exceeds the maximum input token length an error is returned.
+* `START`: when the input exceeds the maximum input token length the start of the input is discarded.
+* `END`: when the input exceeds the maximum input token length the end of the input is discarded.
+=====
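++
+--
+For example, an endpoint dedicated to ingesting documents might pin `input_type` and `truncate` at creation time (a sketch; the endpoint name is hypothetical and the key is a placeholder):

+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/cohere-ingest-embeddings
+{
+    "service": "cohere",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "model_id": "embed-english-v3.0"
+    },
+    "task_settings": {
+        "input_type": "ingest",
+        "truncate": "END"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+--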
+
+
+[discrete]
+[[inference-example-cohere]]
+==== Cohere service examples
+
+The following example shows how to create an {infer} endpoint called
+`cohere-embeddings` to perform a `text_embedding` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/cohere-embeddings
+{
+    "service": "cohere",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "model_id": "embed-english-light-v3.0",
+        "embedding_type": "byte"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+
+The following example shows how to create an {infer} endpoint called
+`cohere-rerank` to perform a `rerank` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/rerank/cohere-rerank
+{
+    "service": "cohere",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "model_id": "rerank-english-v3.0"
+    },
+    "task_settings": {
+        "top_n": 10,
+        "return_documents": true
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+For more examples, also review the
+https://docs.cohere.com/docs/elasticsearch-and-cohere#rerank-search-results-with-cohere-and-elasticsearch[Cohere documentation].
\ No newline at end of file
diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc
new file mode 100644
index 0000000000000..3b9b5b1928d7b
--- /dev/null
+++ b/docs/reference/inference/service-elasticsearch.asciidoc
@@ -0,0 +1,122 @@
+[[infer-service-elasticsearch]]
+=== Elasticsearch {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `elasticsearch` service.
+
+
+[discrete]
+[[infer-service-elasticsearch-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-elasticsearch-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `rerank`,
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-elasticsearch-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`elasticsearch`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `elasticsearch` service.
+--
+
+`model_id`:::
+(Required, string)
+The name of the model to use for the {infer} task.
+It can be the ID of either a built-in model (for example, `.multilingual-e5-small` for E5) or a text embedding model already
+{ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland].
+
+`num_allocations`:::
+(Required, integer)
+The total number of allocations this model is assigned across machine learning nodes. Increasing this value generally increases the throughput.
+
+`num_threads`:::
+(Required, integer)
+Sets the number of threads used by each model allocation during inference. This generally increases the speed per inference request. The inference process is a compute-bound process; `threads_per_allocation` must not exceed the number of available allocated processors per node.
+Must be a power of 2. Max allowed value is 32.
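++
+--
+For example, on a node with 8 allocated processors, 2 allocations with 4 threads each use all 8 processors (a sketch; the endpoint name is hypothetical and the processor count is an assumption):
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/e5-sizing-example
+{
+    "service": "elasticsearch",
+    "service_settings": {
+        "num_allocations": 2,
+        "num_threads": 4, <1>
+        "model_id": ".multilingual-e5-small"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> A power of 2; 2 allocations x 4 threads matches the 8 allocated processors assumed for this sketch.
+--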
+
+`task_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=task-settings]
++
+.`task_settings` for the `rerank` task type
+[%collapsible%closed]
+=====
+`return_documents`:::
+(Optional, Boolean)
+Returns the document instead of only the index. Defaults to `true`.
+=====
+
+
+[discrete]
+[[inference-example-elasticsearch]]
+==== E5 via the `elasticsearch` service
+
+The following example shows how to create an {infer} endpoint called
+`my-e5-model` to perform a `text_embedding` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/my-e5-model
+{
+    "service": "elasticsearch",
+    "service_settings": {
+        "num_allocations": 1,
+        "num_threads": 1,
+        "model_id": ".multilingual-e5-small" <1>
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> The `model_id` must be the ID of one of the built-in E5 models.
+Valid values are `.multilingual-e5-small` and `.multilingual-e5-small_linux-x86_64`.
+For further details, refer to the {ml-docs}/ml-nlp-e5.html[E5 model documentation].
+
+[discrete]
+[[inference-example-eland]]
+==== Models uploaded by Eland via the elasticsearch service
+
+The following example shows how to create an {infer} endpoint called
+`my-msmarco-minilm-model` to perform a `text_embedding` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/my-msmarco-minilm-model
+{
+    "service": "elasticsearch",
+    "service_settings": {
+        "num_allocations": 1,
+        "num_threads": 1,
+        "model_id": "msmarco-MiniLM-L12-cos-v5" <1>
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> The `model_id` must be the ID of a text embedding model which has already been
+{ml-docs}/ml-nlp-import-model.html#ml-nlp-import-script[uploaded through Eland].
\ No newline at end of file
diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc
new file mode 100644
index 0000000000000..829ff4968c5be
--- /dev/null
+++ b/docs/reference/inference/service-elser.asciidoc
@@ -0,0 +1,95 @@
+[[infer-service-elser]]
+=== ELSER {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `elser` service.
+
+
+[discrete]
+[[infer-service-elser-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-elser-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `sparse_embedding`.
+--
+
+[discrete]
+[[infer-service-elser-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`elser`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `elser` service.
+--
+
+`num_allocations`:::
+(Required, integer)
+The total number of allocations this model is assigned across machine learning nodes. Increasing this value generally increases the throughput.
+
+`num_threads`:::
+(Required, integer)
+Sets the number of threads used by each model allocation during inference. This generally increases the speed per inference request. The inference process is a compute-bound process; `threads_per_allocation` must not exceed the number of available allocated processors per node.
+Must be a power of 2. Max allowed value is 32.
+
+
+[discrete]
+[[inference-example-elser]]
+==== ELSER service example
+
+The following example shows how to create an {infer} endpoint called
+`my-elser-model` to perform a `sparse_embedding` task type.
+Refer to the {ml-docs}/ml-nlp-elser.html[ELSER model documentation] for more info.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/sparse_embedding/my-elser-model
+{
+    "service": "elser",
+    "service_settings": {
+        "num_allocations": 1,
+        "num_threads": 1
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+
+Example response:
+
+[source,console-result]
+------------------------------------------------------------
+{
+  "inference_id": "my-elser-model",
+  "task_type": "sparse_embedding",
+  "service": "elser",
+  "service_settings": {
+    "num_allocations": 1,
+    "num_threads": 1
+  },
+  "task_settings": {}
+}
+------------------------------------------------------------
+// NOTCONSOLE
\ No newline at end of file
diff --git a/docs/reference/inference/service-google-ai-studio.asciidoc b/docs/reference/inference/service-google-ai-studio.asciidoc
new file mode 100644
index 0000000000000..25aa89cd49110
--- /dev/null
+++ b/docs/reference/inference/service-google-ai-studio.asciidoc
@@ -0,0 +1,87 @@
+[[infer-service-google-ai-studio]]
+=== Google AI Studio {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `googleaistudio` service.
+
+
+[discrete]
+[[infer-service-google-ai-studio-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-google-ai-studio-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `completion`,
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-google-ai-studio-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`googleaistudio`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `googleaistudio` service.
+--
+
+`api_key`:::
+(Required, string)
+A valid API key for the Google Gemini API.
+
+`model_id`:::
+(Required, string)
+The name of the model to use for the {infer} task.
+You can find the supported models at https://ai.google.dev/gemini-api/docs/models/gemini[Gemini API models].
+
+`rate_limit`:::
+(Optional, object)
+By default, the `googleaistudio` service sets the number of requests allowed per minute to `360`.
+This helps to minimize the number of rate limit errors returned from Google AI Studio.
+To modify this, set the `requests_per_minute` setting of this object in your service settings:
++
+--
+include::inference-shared.asciidoc[tag=request-per-minute-example]
+--
+
+
+[discrete]
+[[inference-example-google-ai-studio]]
+==== Google AI Studio service example
+
+The following example shows how to create an {infer} endpoint called
+`google_ai_studio_completion` to perform a `completion` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/completion/google_ai_studio_completion
+{
+    "service": "googleaistudio",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "model_id": "<model_id>"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
\ No newline at end of file
diff --git a/docs/reference/inference/service-hugging-face.asciidoc b/docs/reference/inference/service-hugging-face.asciidoc
new file mode 100644
index 0000000000000..177a15177d21f
--- /dev/null
+++ b/docs/reference/inference/service-hugging-face.asciidoc
@@ -0,0 +1,114 @@
+[[infer-service-hugging-face]]
+=== Hugging Face {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `hugging_face` service.
+
+
+[discrete]
+[[infer-service-hugging-face-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-hugging-face-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-hugging-face-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`hugging_face`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `hugging_face` service.
+--
+
+`api_key`:::
+(Required, string)
+A valid access token of your Hugging Face account.
+You can find your Hugging Face access tokens or you can create a new one
+https://huggingface.co/settings/tokens[on the settings page].
++
+--
+include::inference-shared.asciidoc[tag=api-key-admonition]
+--
+
+`url`:::
+(Required, string)
+The URL endpoint to use for the requests.
+
+`rate_limit`:::
+(Optional, object)
+By default, the `hugging_face` service sets the number of requests allowed per minute to `3000`.
+This helps to minimize the number of rate limit errors returned from Hugging Face.
+To modify this, set the `requests_per_minute` setting of this object in your service settings:
++
+--
+include::inference-shared.asciidoc[tag=request-per-minute-example]
+--
+
+
+[discrete]
+[[inference-example-hugging-face]]
+==== Hugging Face service example
+
+The following example shows how to create an {infer} endpoint called
+`hugging-face-embeddings` to perform a `text_embedding` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/hugging-face-embeddings
+{
+    "service": "hugging_face",
+    "service_settings": {
+        "api_key": "<access_token>", <1>
+        "url": "<url_endpoint>" <2>
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> A valid Hugging Face access token.
+You can find it on the
+https://huggingface.co/settings/tokens[settings page of your account].
+<2> The {infer} endpoint URL you created on Hugging Face.
+
+Create a new {infer} endpoint on
+https://ui.endpoints.huggingface.co/[the Hugging Face endpoint page] to get an endpoint URL.
+Select the model you want to use on the new endpoint creation page (for example `intfloat/e5-small-v2`), then select the `Sentence Embeddings`
+task under the Advanced configuration section.
+Create the endpoint.
+Copy the URL after the endpoint initialization has been finished.
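+
+Once the endpoint URL is in place, the resulting {infer} endpoint can be exercised with a perform {infer} request.
+This sketch assumes the `hugging-face-embeddings` endpoint created above; the input text is arbitrary:
+
+[source,console]
+------------------------------------------------------------
+POST _inference/text_embedding/hugging-face-embeddings
+{
+    "input": "The quick brown fox jumps over the lazy dog"
+}
+------------------------------------------------------------
+// TEST[skip:TBD]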
+
+[discrete]
+[[inference-example-hugging-face-supported-models]]
+The list of recommended models for the Hugging Face service:
+
+* https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2[all-MiniLM-L6-v2]
+* https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2[all-MiniLM-L12-v2]
+* https://huggingface.co/sentence-transformers/all-mpnet-base-v2[all-mpnet-base-v2]
+* https://huggingface.co/intfloat/e5-base-v2[e5-base-v2]
+* https://huggingface.co/intfloat/e5-small-v2[e5-small-v2]
+* https://huggingface.co/intfloat/multilingual-e5-base[multilingual-e5-base]
+* https://huggingface.co/intfloat/multilingual-e5-small[multilingual-e5-small]
\ No newline at end of file
diff --git a/docs/reference/inference/service-mistral.asciidoc b/docs/reference/inference/service-mistral.asciidoc
new file mode 100644
index 0000000000000..077e610191705
--- /dev/null
+++ b/docs/reference/inference/service-mistral.asciidoc
@@ -0,0 +1,99 @@
+[[infer-service-mistral]]
+=== Mistral {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `mistral` service.
+
+
+[discrete]
+[[infer-service-mistral-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-mistral-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-mistral-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`mistral`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `mistral` service.
+--
+
+`api_key`:::
+(Required, string)
+A valid API key for your Mistral account.
+You can find your Mistral API keys or you can create a new one
+https://console.mistral.ai/api-keys/[on the API Keys page].
++
+--
+include::inference-shared.asciidoc[tag=api-key-admonition]
+--
+
+`model`:::
+(Required, string)
+The name of the model to use for the {infer} task.
+Refer to the https://docs.mistral.ai/getting-started/models/[Mistral models documentation]
+for the list of available text embedding models.
+
+`max_input_tokens`:::
+(Optional, integer)
+Allows you to specify the maximum number of tokens per input before chunking occurs.
+
+`rate_limit`:::
+(Optional, object)
+By default, the `mistral` service sets the number of requests allowed per minute to `240`.
+This helps to minimize the number of rate limit errors returned from the Mistral API.
+To modify this, set the `requests_per_minute` setting of this object in your service settings:
++
+--
+include::inference-shared.asciidoc[tag=request-per-minute-example]
+--
+
+
+[discrete]
+[[inference-example-mistral]]
+==== Mistral service example
+
+The following example shows how to create an {infer} endpoint called
+`mistral-embeddings-test` to perform a `text_embedding` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/mistral-embeddings-test
+{
+    "service": "mistral",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "model": "mistral-embed" <1>
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+<1> The `model` must be the ID of a text embedding model which can be found in the
+https://docs.mistral.ai/getting-started/models/[Mistral models documentation].
\ No newline at end of file
diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc
new file mode 100644
index 0000000000000..075e76dc7d741
--- /dev/null
+++ b/docs/reference/inference/service-openai.asciidoc
@@ -0,0 +1,147 @@
+[[infer-service-openai]]
+=== OpenAI {infer} service
+
+Creates an {infer} endpoint to perform an {infer} task with the `openai` service.
+
+
+[discrete]
+[[infer-service-openai-api-request]]
+==== {api-request-title}
+
+`PUT /_inference/<task_type>/<inference_id>`
+
+[discrete]
+[[infer-service-openai-api-path-params]]
+==== {api-path-parms-title}
+
+`<inference_id>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=inference-id]
+
+`<task_type>`::
+(Required, string)
+include::inference-shared.asciidoc[tag=task-type]
++
+--
+Available task types:
+
+* `completion`,
+* `text_embedding`.
+--
+
+[discrete]
+[[infer-service-openai-api-request-body]]
+==== {api-request-body-title}
+
+`service`::
+(Required, string)
+The type of service supported for the specified task type. In this case,
+`openai`.
+
+`service_settings`::
+(Required, object)
+include::inference-shared.asciidoc[tag=service-settings]
++
+--
+These settings are specific to the `openai` service.
+--
+
+`api_key`:::
+(Required, string)
+A valid API key of your OpenAI account.
+You can find your OpenAI API keys in your OpenAI account under the
+https://platform.openai.com/api-keys[API keys section].
++
+--
+include::inference-shared.asciidoc[tag=api-key-admonition]
+--
+
+`model_id`:::
+(Required, string)
+The name of the model to use for the {infer} task.
+Refer to the
+https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation]
+for the list of available text embedding models.
+
+`organization_id`:::
+(Optional, string)
+The unique identifier of your organization.
+You can find the Organization ID in your OpenAI account under
+https://platform.openai.com/account/organization[**Settings** > **Organizations**].
+
+`url`:::
+(Optional, string)
+The URL endpoint to use for the requests.
+Can be changed for testing purposes.
+Defaults to `https://api.openai.com/v1/embeddings`.
+
+`rate_limit`:::
+(Optional, object)
+The `openai` service sets a default number of requests allowed per minute depending on the task type.
+For `text_embedding` it is set to `3000`.
+For `completion` it is set to `500`.
+This helps to minimize the number of rate limit errors returned from OpenAI.
+To modify this, set the `requests_per_minute` setting of this object in your service settings:
++
+--
+include::inference-shared.asciidoc[tag=request-per-minute-example]
+
+More information about the rate limits for OpenAI can be found in your https://platform.openai.com/account/limits[Account limits].
+--
+
+`task_settings`::
+(Optional, object)
+include::inference-shared.asciidoc[tag=task-settings]
++
+.`task_settings` for the `completion` task type
+[%collapsible%closed]
+=====
+`user`:::
+(Optional, string)
+Specifies the user issuing the request, which can be used for abuse detection.
+=====
++
+.`task_settings` for the `text_embedding` task type
+[%collapsible%closed]
+=====
+`user`:::
+(Optional, string)
+Specifies the user issuing the request, which can be used for abuse detection.
+=====
+
+
+[discrete]
+[[inference-example-openai]]
+==== OpenAI service example
+
+The following example shows how to create an {infer} endpoint called
+`openai-embeddings` to perform a `text_embedding` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/text_embedding/openai-embeddings
+{
+    "service": "openai",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "model_id": "text-embedding-ada-002"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
+
+The next example shows how to create an {infer} endpoint called
+`openai-completion` to perform a `completion` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/completion/openai-completion
+{
+    "service": "openai",
+    "service_settings": {
+        "api_key": "<api_key>",
+        "model_id": "gpt-3.5-turbo"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]
\ No newline at end of file

From 35eae4029a0e77642580d2aa46db0d1f079bdbbc Mon Sep 17 00:00:00 2001
From: Tim Grein
Date: Mon, 1 Jul 2024 10:13:48 +0200
Subject: [PATCH 070/216] Fix typo in get-inference docs (retrives ->
 retrieves) (#110320)

---
 docs/reference/inference/get-inference.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc
index 339146adfece9..0b45ad92322f6 100644
--- a/docs/reference/inference/get-inference.asciidoc
+++ b/docs/reference/inference/get-inference.asciidoc
@@ -65,7 +65,7 @@ The type of {infer} task that the model performs.
 [[get-inference-api-example]]
 ==== {api-examples-title}
 
-The following API call retrives information about the `my-elser-model` {infer}
+The following API call retrieves information about the `my-elser-model` {infer}
 model that can perform `sparse_embedding` tasks.
 

From 42502fe7778223ad1a5bff378f7552e9e4009db8 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Mon, 1 Jul 2024 10:40:52 +0200
Subject: [PATCH 071/216] Avoid bucket copies in Aggregations (#110261)

Motivated by heap dumps for aggregations often containing mostly duplicate
bucket instances.
Note that this is a start, but there's a lot of duplication we can remove in
follow-ups. But for this one, there's simply a lot of straightforward spots
where we can avoid copying a bucket/list-of-buckets and derived instances.
--- .../aggregations/InternalAggregations.java | 31 ++++++++++++++++--- .../InternalMultiBucketAggregation.java | 29 ++++++++++++----- .../AbstractHistogramAggregator.java | 3 ++ .../histogram/DateHistogramAggregator.java | 3 ++ .../bucket/histogram/InternalHistogram.java | 18 ++++++----- 5 files changed, 66 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index 4f234c33b13a6..297bb81b27b25 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -226,11 +226,34 @@ public static InternalAggregations topLevelReduce(List agg } if (context.isFinalReduce()) { List reducedInternalAggs = reduced.getInternalAggregations(); - reducedInternalAggs = reducedInternalAggs.stream() - .map(agg -> agg.reducePipelines(agg, context, context.pipelineTreeRoot().subTree(agg.getName()))) - .collect(Collectors.toCollection(ArrayList::new)); + List internalAggregations = null; + for (int i = 0; i < reducedInternalAggs.size(); i++) { + InternalAggregation agg = reducedInternalAggs.get(i); + InternalAggregation internalAggregation = agg.reducePipelines( + agg, + context, + context.pipelineTreeRoot().subTree(agg.getName()) + ); + if (internalAggregation.equals(agg) == false) { + if (internalAggregations == null) { + internalAggregations = new ArrayList<>(reducedInternalAggs); + } + internalAggregations.set(i, internalAggregation); + } + } - for (PipelineAggregator pipelineAggregator : context.pipelineTreeRoot().aggregators()) { + var pipelineAggregators = context.pipelineTreeRoot().aggregators(); + if (pipelineAggregators.isEmpty()) { + if (internalAggregations == null) { + return reduced; + } + return from(internalAggregations); + } + if (internalAggregations != null) { + reducedInternalAggs = internalAggregations; + } + reducedInternalAggs = new ArrayList<>(reducedInternalAggs); + for (PipelineAggregator pipelineAggregator : pipelineAggregators) { SiblingPipelineAggregator sib = (SiblingPipelineAggregator) pipelineAggregator; InternalAggregation newAgg = sib.doReduce(from(reducedInternalAggs), context); reducedInternalAggs.add(newAgg); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index de19c26daff92..e046b5fc9244c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -207,16 +207,31 @@ public void forEachBucket(Consumer consumer) { } private List reducePipelineBuckets(AggregationReduceContext reduceContext, PipelineTree pipelineTree) { - List reducedBuckets = new ArrayList<>(); - for (B bucket : getBuckets()) { - List aggs = new ArrayList<>(); - for (Aggregation agg : bucket.getAggregations()) { + List reducedBuckets = null; + var buckets = getBuckets(); + for (int bucketIndex = 0; bucketIndex < buckets.size(); bucketIndex++) { + B bucket = buckets.get(bucketIndex); + List aggs = null; + int aggIndex = 0; + for (InternalAggregation agg : bucket.getAggregations()) { PipelineTree subTree = pipelineTree.subTree(agg.getName()); - aggs.add(((InternalAggregation) agg).reducePipelines((InternalAggregation) agg, 
reduceContext, subTree)); + var reduced = agg.reducePipelines(agg, reduceContext, subTree); + if (reduced.equals(agg) == false) { + if (aggs == null) { + aggs = bucket.getAggregations().copyResults(); + } + aggs.set(aggIndex, reduced); + } + aggIndex++; + } + if (aggs != null) { + if (reducedBuckets == null) { + reducedBuckets = new ArrayList<>(buckets); + } + reducedBuckets.set(bucketIndex, createBucket(InternalAggregations.from(aggs), bucket)); } - reducedBuckets.add(createBucket(InternalAggregations.from(aggs), bucket)); } - return reducedBuckets; + return reducedBuckets == null ? buckets : reducedBuckets; } public abstract static class InternalBucket implements Bucket, Writeable { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java index 62b7a0747ca00..04028de5656ca 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java @@ -84,6 +84,9 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I double key = roundKey * interval + offset; return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { + if (buckets.isEmpty()) { + return buildEmptyAggregation(); + } // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 2c57bd4b38a04..cb01aa5a31a9a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -340,6 +340,9 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { return new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { + if (buckets.isEmpty()) { + return buildEmptyAggregation(); + } // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index 098bd5ebc7b3d..b44d8fec4030a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -282,11 +282,17 @@ BucketOrder getOrder() { @Override public InternalHistogram create(List buckets) { + if (this.buckets.equals(buckets)) { + return this; + } return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, format, keyed, 
metadata); } @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { + if (prototype.aggregations.equals(aggregations)) { + return prototype; + } return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); } @@ -475,6 +481,9 @@ public InternalAggregation get() { CollectionUtil.introSort(reducedBuckets, order.comparator()); } } + if (reducedBuckets.equals(buckets)) { + return InternalHistogram.this; + } return new InternalHistogram(getName(), reducedBuckets, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata()); } }; @@ -520,14 +529,9 @@ public Number getKey(MultiBucketsAggregation.Bucket bucket) { } @Override + @SuppressWarnings({ "rawtypes", "unchecked" }) public InternalAggregation createAggregation(List buckets) { - // convert buckets to the right type - List buckets2 = new ArrayList<>(buckets.size()); - for (Object b : buckets) { - buckets2.add((Bucket) b); - } - buckets2 = Collections.unmodifiableList(buckets2); - return new InternalHistogram(name, buckets2, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata()); + return new InternalHistogram(name, (List) buckets, order, minDocCount, emptyBucketInfo, format, keyed, getMetadata()); } @Override From 5fa92812cff5929d5267e079cf3a3c80aa311901 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 1 Jul 2024 12:08:04 +0300 Subject: [PATCH 072/216] Add test for nested array, fix sort on nested test. (#110325) --- .../indices.create/20_synthetic_source.yml | 49 +++++++++++++++++++ .../test/indices.sort/20_nested.yml | 1 + 2 files changed, 50 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml index dcd1f93e35da8..22deb7012c4ed 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/20_synthetic_source.yml @@ -1155,3 +1155,52 @@ doubly nested object: - match: { hits.hits.2._source.nested_field.1.sub_nested_field.1.number: 42 } - length: { hits.hits.3._source: 1 } - match: { hits.hits.3._source.id: 3 } + + +--- +nested object with stored array: + - requires: + cluster_features: ["mapper.track_ignored_source"] + reason: requires tracking ignored source + + - do: + indices.create: + index: test + body: + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + nested_array_regular: + type: nested + nested_array_stored: + type: nested + store_array_source: true + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "name": "A", "nested_array_regular": [ { "b": [ { "c": 10 }, { "c": 100 } ] }, { "b": [ { "c": 20 }, { "c": 200 } ] } ] }' + - '{ "create": { } }' + - '{ "name": "B", "nested_array_stored": [ { "b": [ { "c": 10 }, { "c": 100 } ] }, { "b": [ { "c": 20 }, { "c": 200 } ] } ] }' + + - match: { errors: false } + + - do: + search: + index: test + sort: name + - match: { hits.total.value: 2 } + - match: { hits.hits.0._source.name: A } + - match: { hits.hits.0._source.nested_array_regular.0.b.c: [ 10, 100] } + - match: { hits.hits.0._source.nested_array_regular.1.b.c: [ 20, 200] } + - match: { hits.hits.1._source.name: B } + - match: { hits.hits.1._source.nested_array_stored.0.b.0.c: 10 } + - match: { 
hits.hits.1._source.nested_array_stored.0.b.1.c: 100 } + - match: { hits.hits.1._source.nested_array_stored.1.b.0.c: 20 } + - match: { hits.hits.1._source.nested_array_stored.1.b.1.c: 200 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml index 547ff096822c0..35e8a270ca6a2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml @@ -37,6 +37,7 @@ sort doc with nested object: - do: search: index: test + sort: name - match: { hits.total.value: 4 } - match: { hits.hits.0._source.name: aaaa } From c13aa98841b49c3914fbc4f7b72499921545ee16 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 1 Jul 2024 12:04:44 +0200 Subject: [PATCH 073/216] Implement lucene pushdown on ST_DISTANCE for GT and GTE (#110253) * Support ST_DISTANCE >= distance * Support multiple ST_DISTANCE in same predicate * Improved logic, so we don't re-write unnecessarily * Support ST_DISTANCE with EQ predicate We do a range query from Math.nextDown(value) to value. * Revert "Support ST_DISTANCE with EQ predicate" This reverts commit 07565e47bdb9e75c6d775e0d80254907d8bf6bdf. --- .../src/main/resources/spatial.csv-spec | 41 +++++ .../optimizer/LocalPhysicalPlanOptimizer.java | 74 +++++---- .../querydsl/query/SpatialRelatesQuery.java | 2 +- .../optimizer/PhysicalPlanOptimizerTests.java | 149 +++++++++++++----- 4 files changed, 197 insertions(+), 69 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index f3604a90a35c7..aeb95da1ddaa0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -966,6 +966,29 @@ ARN | Arlanda | POINT(17.9307299016916 59.6511203397372) | SVG | Stavanger Sola | POINT (5.6298103297218 58.8821564842185) | Norway | Sandnes | POINT (5.7361 58.8517) | 548.26 | 541.35 ; +airportsWithinDistanceBandFromCopenhagenTrainStation +required_capability: st_distance + +FROM airports +| WHERE ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) < 600000 + AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) > 400000 +| EVAL distance = ROUND(ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)"))/1000,2) +| EVAL city_distance = ROUND(ST_DISTANCE(city_location, TO_GEOPOINT("POINT(12.565 55.673)"))/1000,2) +| KEEP abbrev, name, location, country, city, city_location, distance, city_distance +| SORT distance ASC +; + +abbrev:k | name:text | location:geo_point | country:k | city:k | city_location:geo_point | distance:d | city_distance:d +GDN | Gdansk Lech Walesa | POINT(18.4684422165911 54.3807025352925) | Poland | Gdańsk | POINT(18.6453 54.3475) | 402.61 | 414.59 +NYO | Stockholm-Skavsta | POINT(16.9216055584254 58.7851041303448) | Sweden | Nyköping | POINT(17.0086 58.7531) | 433.99 | 434.43 +OSL | Oslo Gardermoen | POINT(11.0991032762581 60.1935783171386) | Norway | Oslo | POINT(10.7389 59.9133) | 510.03 | 483.71 +DRS | Dresden | POINT(13.7649671440047 51.1250912428871) | Germany | Dresden | POINT(13.74 51.05) | 511.9 | 519.91 +BMA | Bromma | POINT(17.9456175406145 59.3555902065112) | Sweden | Stockholm | POINT(18.0686 59.3294) | 520.18 | 522.54 +PLQ | Palanga Int'l | POINT(21.0974463986251 55.9713426235358) 
| Lithuania | Klaipėda | POINT(21.1667 55.75) | 533.67 | 538.56 +ARN | Arlanda | POINT(17.9307299016916 59.6511203397372) | Sweden | Stockholm | POINT(18.0686 59.3294) | 545.09 | 522.54 +SVG | Stavanger Sola | POINT (5.6298103297218 58.8821564842185) | Norway | Sandnes | POINT (5.7361 58.8517) | 548.26 | 541.35 +; + airportsWithinDistanceCopenhagenTrainStationCount required_capability: st_distance @@ -982,6 +1005,24 @@ count:long | country:k 1 | Poland ; +airportsWithinDistanceBandCopenhagenTrainStationCount +required_capability: st_distance + +FROM airports +| WHERE ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) < 600000 + AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) > 400000 +| STATS count=COUNT() BY country +| SORT count DESC, country ASC +; + +count:long | country:k +3 | Sweden +2 | Norway +1 | Germany +1 | Lithuania +1 | Poland +; + airportsSortDistanceFromCopenhagenTrainStation required_capability: st_distance diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 2a70ccdd3705c..21c8ddb62bf86 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -49,6 +49,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils; @@ -598,54 +599,71 @@ public static class EnableSpatialDistancePushdown extends PhysicalOptimizerRules protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext ctx) { PhysicalPlan plan = filterExec; if (filterExec.child() instanceof EsQueryExec) { - if (filterExec.condition() instanceof EsqlBinaryComparison comparison) { - ComparisonType comparisonType = ComparisonType.from(comparison.getFunctionType()); - if (comparison.left() instanceof StDistance dist && comparison.right().foldable()) { - plan = rewriteComparison(filterExec, dist, comparison.right(), comparisonType); - } else if (comparison.right() instanceof StDistance dist && comparison.left().foldable()) { - plan = rewriteComparison(filterExec, dist, comparison.right(), ComparisonType.invert(comparisonType)); + List rewritten = new ArrayList<>(); + List notRewritten = new ArrayList<>(); + for (Expression exp : splitAnd(filterExec.condition())) { + boolean didRewrite = false; + if (exp instanceof EsqlBinaryComparison comparison) { + ComparisonType comparisonType = ComparisonType.from(comparison.getFunctionType()); + if (comparison.left() instanceof StDistance dist && comparison.right().foldable()) { + didRewrite = rewriteComparison(rewritten, dist, comparison.right(), comparisonType); + } else if (comparison.right() instanceof StDistance dist && comparison.left().foldable()) { + didRewrite = rewriteComparison(rewritten, dist, comparison.left(), 
ComparisonType.invert(comparisonType)); + } } + if (didRewrite == false) { + notRewritten.add(exp); + } + } + if (rewritten.isEmpty() == false) { + rewritten.addAll(notRewritten); + plan = new FilterExec(filterExec.source(), filterExec.child(), Predicates.combineAnd(rewritten)); } } return plan; } - private FilterExec rewriteComparison(FilterExec filterExec, StDistance dist, Expression literal, ComparisonType comparisonType) { - // We currently only support spatial distance within a minimum range - if (comparisonType.lt) { + private boolean rewriteComparison(List rewritten, StDistance dist, Expression literal, ComparisonType comparisonType) { + // Currently we do not support Equals + if (comparisonType.lt || comparisonType.gt) { Object value = literal.fold(); if (value instanceof Number number) { if (dist.right().foldable()) { - return rewriteDistanceFilter(filterExec, dist.source(), dist.left(), dist.right(), number, comparisonType.eq); + return rewriteDistanceFilter(rewritten, dist.source(), dist.left(), dist.right(), number, comparisonType); } else if (dist.left().foldable()) { - return rewriteDistanceFilter(filterExec, dist.source(), dist.right(), dist.left(), number, comparisonType.eq); + return rewriteDistanceFilter(rewritten, dist.source(), dist.right(), dist.left(), number, comparisonType); } } } - return filterExec; + return false; } - private FilterExec rewriteDistanceFilter( - FilterExec filterExec, + private boolean rewriteDistanceFilter( + List rewritten, Source source, Expression spatialExpression, Expression literalExpression, Number number, - boolean inclusive + ComparisonType comparisonType ) { Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(literalExpression); if (geometry instanceof Point point) { double distance = number.doubleValue(); - if (inclusive == false) { - distance = Math.nextDown(distance); + if (comparisonType.eq == false) { + distance = comparisonType.lt ? Math.nextDown(distance) : Math.nextUp(distance); } var circle = new Circle(point.getX(), point.getY(), distance); var wkb = WellKnownBinary.toWKB(circle, ByteOrder.LITTLE_ENDIAN); var cExp = new Literal(literalExpression.source(), new BytesRef(wkb), DataType.GEO_SHAPE); - return new FilterExec(filterExec.source(), filterExec.child(), new SpatialIntersects(source, spatialExpression, cExp)); + rewritten.add( + comparisonType.lt + ? new SpatialIntersects(source, spatialExpression, cExp) + : new SpatialDisjoint(source, spatialExpression, cExp) + ); + return true; } - return filterExec; + return false; } /** @@ -655,20 +673,20 @@ private FilterExec rewriteDistanceFilter( * field disables those. 
*/ enum ComparisonType { - LTE(true, true, true), - LT(true, false, true), + LTE(true, false, true), + LT(true, false, false), GTE(false, true, true), - GT(false, false, true), - UNSUPPORTED(false, false, false); + GT(false, true, false), + EQ(false, false, true); private final boolean lt; + private final boolean gt; private final boolean eq; - private final boolean supported; - ComparisonType(boolean lt, boolean eq, boolean supported) { + ComparisonType(boolean lt, boolean gt, boolean eq) { this.lt = lt; + this.gt = gt; this.eq = eq; - this.supported = supported; } static ComparisonType from(EsqlBinaryComparison.BinaryComparisonOperation op) { @@ -677,7 +695,7 @@ static ComparisonType from(EsqlBinaryComparison.BinaryComparisonOperation op) { case LTE -> LTE; case GT -> GT; case GTE -> GTE; - default -> UNSUPPORTED; + default -> EQ; }; } @@ -687,7 +705,7 @@ static ComparisonType invert(ComparisonType comparisonType) { case LTE -> GTE; case GT -> LT; case GTE -> LTE; - default -> UNSUPPORTED; + default -> EQ; }; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java index 23d2126e39706..23de36d6d3d77 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -63,7 +63,7 @@ public QueryBuilder asBuilder() { @Override protected String innerToString() { - throw new IllegalArgumentException("SpatialRelatesQuery.innerToString() not implemented"); + return "field:" + field + ", dataType:" + dataType + ", queryRelation:" + queryRelation + ", shape:" + shape; } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 43e806b9a55cb..51cc2483d73b7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -113,6 +114,7 @@ import org.junit.Before; import java.util.Arrays; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -3483,50 +3485,117 @@ public void testPushSpatialDistanceToSource() { "ST_DISTANCE(location, TO_GEOPOINT(\"POINT(12.565 55.673)\"))", "ST_DISTANCE(TO_GEOPOINT(\"POINT(12.565 55.673)\"), location)" }) { - for (String op : new String[] { "<", "<=", ">", ">=" }) { - var eq = op.contains("="); - var lt = op.contains("<"); - var predicate = lt ? 
distanceFunction + " " + op + " 600000" : "600000 " + op + " " + distanceFunction; - var query = "FROM airports | WHERE " + predicate + " AND scalerank > 1"; - var plan = this.physicalPlan(query, airports); - var limit = as(plan, LimitExec.class); - var exchange = as(limit.child(), ExchangeExec.class); - var fragment = as(exchange.child(), FragmentExec.class); - var limit2 = as(fragment.fragment(), Limit.class); - var filter = as(limit2.child(), Filter.class); - var and = as(filter.condition(), And.class); - var comp = as(and.left(), EsqlBinaryComparison.class); - var expectedComp = eq ? LessThanOrEqual.class : LessThan.class; // normalized to less than - assertThat("filter contains expected binary comparison for " + predicate, comp, instanceOf(expectedComp)); - assertThat("filter contains ST_DISTANCE", comp.left(), instanceOf(StDistance.class)); - - var optimized = optimizedPlan(plan); - var topLimit = as(optimized, LimitExec.class); - exchange = as(topLimit.child(), ExchangeExec.class); - var project = as(exchange.child(), ProjectExec.class); - var fieldExtract = as(project.child(), FieldExtractExec.class); - var source = source(fieldExtract.child()); - // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData - // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); - var bool = as(source.query(), BoolQueryBuilder.class); - var rangeQueryBuilders = bool.filter().stream().filter(p -> p instanceof SingleValueQuery.Builder).toList(); - assertThat("Expected one range query builder", rangeQueryBuilders.size(), equalTo(1)); - assertThat(((SingleValueQuery.Builder) rangeQueryBuilders.get(0)).field(), equalTo("scalerank")); - var shapeQueryBuilders = bool.filter().stream().filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder).toList(); - assertThat("Expected one shape query builder", shapeQueryBuilders.size(), equalTo(1)); - var condition = as(shapeQueryBuilders.get(0), SpatialRelatesQuery.ShapeQueryBuilder.class); - assertThat("Geometry field name", condition.fieldName(), equalTo("location")); - assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); - assertThat("Geometry is Circle", condition.shape().type(), equalTo(ShapeType.CIRCLE)); - var circle = as(condition.shape(), Circle.class); - assertThat("Circle center-x", circle.getX(), equalTo(12.565)); - assertThat("Circle center-y", circle.getY(), equalTo(55.673)); - var expected = eq ? 600000.0 : Math.nextDown(600000.0); - assertThat("Circle radius", circle.getRadiusMeters(), equalTo(expected)); + for (boolean reverse : new Boolean[] { false, true }) { + for (String op : new String[] { "<", "<=", ">", ">=" }) { + var expected = ExpectedComparison.from(op, reverse, 600000.0); + var predicate = reverse ? 
"600000 " + op + " " + distanceFunction : distanceFunction + " " + op + " 600000"; + var query = "FROM airports | WHERE " + predicate + " AND scalerank > 1"; + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + var and = as(filter.condition(), And.class); + var comp = as(and.left(), EsqlBinaryComparison.class); + assertThat("filter contains expected binary comparison for " + predicate, comp, instanceOf(expected.comp)); + assertThat("filter contains ST_DISTANCE", comp.left(), instanceOf(StDistance.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + var bool = as(source.query(), BoolQueryBuilder.class); + var rangeQueryBuilders = bool.filter().stream().filter(p -> p instanceof SingleValueQuery.Builder).toList(); + assertThat("Expected one range query builder", rangeQueryBuilders.size(), equalTo(1)); + assertThat(((SingleValueQuery.Builder) rangeQueryBuilders.get(0)).field(), equalTo("scalerank")); + var shapeQueryBuilders = bool.filter() + .stream() + .filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder) + .toList(); + assertThat("Expected one shape query builder", shapeQueryBuilders.size(), equalTo(1)); + var condition = as(shapeQueryBuilders.get(0), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(expected.shapeRelation())); + assertThat("Geometry is Circle", condition.shape().type(), equalTo(ShapeType.CIRCLE)); + var circle = as(condition.shape(), Circle.class); + assertThat("Circle center-x", circle.getX(), equalTo(12.565)); + assertThat("Circle center-y", circle.getY(), equalTo(55.673)); + assertThat("Circle radius for predicate " + predicate, circle.getRadiusMeters(), equalTo(expected.value)); + } } } } + public void testPushSpatialDistanceBandToSource() { + var query = """ + FROM airports + | WHERE ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 600000 + AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000 + """; + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + var and = as(filter.condition(), And.class); + for (Expression expression : and.arguments()) { + var comp = as(expression, EsqlBinaryComparison.class); + var expectedComp = comp.equals(and.left()) ? 
LessThanOrEqual.class : GreaterThanOrEqual.class; + assertThat("filter contains expected binary comparison", comp, instanceOf(expectedComp)); + assertThat("filter contains ST_DISTANCE", comp.left(), instanceOf(StDistance.class)); + } + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + var bool = as(source.query(), BoolQueryBuilder.class); + var rangeQueryBuilders = bool.filter().stream().filter(p -> p instanceof SingleValueQuery.Builder).toList(); + assertThat("Expected zero range query builder", rangeQueryBuilders.size(), equalTo(0)); + var shapeQueryBuilders = bool.must().stream().filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder).toList(); + assertThat("Expected two shape query builders", shapeQueryBuilders.size(), equalTo(2)); + var relationStats = new HashMap(); + for (var builder : shapeQueryBuilders) { + var condition = as(builder, SpatialRelatesQuery.ShapeQueryBuilder.class); + var expected = condition.relation() == ShapeRelation.INTERSECTS ? 600000.0 : 400000.0; + relationStats.compute(condition.relation(), (r, c) -> c == null ? 1 : c + 1); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Geometry is Circle", condition.shape().type(), equalTo(ShapeType.CIRCLE)); + var circle = as(condition.shape(), Circle.class); + assertThat("Circle center-x", circle.getX(), equalTo(12.565)); + assertThat("Circle center-y", circle.getY(), equalTo(55.673)); + assertThat("Circle radius for shape relation " + condition.relation(), circle.getRadiusMeters(), equalTo(expected)); + } + } + + private record ExpectedComparison(Class comp, double value) { + ShapeRelation shapeRelation() { + return comp.getSimpleName().startsWith("GreaterThan") ? ShapeRelation.DISJOINT : ShapeRelation.INTERSECTS; + } + + static ExpectedComparison from(String op, boolean reverse, double value) { + double up = Math.nextUp(value); + double down = Math.nextDown(value); + return switch (op) { + case "<" -> reverse ? from(GreaterThan.class, up) : from(LessThan.class, down); + case "<=" -> reverse ? from(GreaterThanOrEqual.class, value) : from(LessThanOrEqual.class, value); + case ">" -> reverse ? from(LessThan.class, down) : from(GreaterThan.class, up); + case ">=" -> reverse ? 
from(LessThanOrEqual.class, value) : from(GreaterThanOrEqual.class, value); + default -> from(Equals.class, value); + }; + } + + static ExpectedComparison from(Class comp, double value) { + return new ExpectedComparison(comp, value); + } + } + public void testPushCartesianSpatialIntersectsToSource() { for (String query : new String[] { """ FROM airports_web From 4e4060b10e9ae05cc83b6767e2a284a38b8430d1 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 1 Jul 2024 13:13:24 +0300 Subject: [PATCH 074/216] Improve data stream lifecycle test by verifying template creation (#110319) --- .../yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml index 7ad16faae2314..07ed83e1f8863 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/dlm/10_usage.yml @@ -31,6 +31,7 @@ lifecycle: data_retention: 10d data_stream: {} + - is_true: acknowledged - do: indices.create_data_stream: From a145b64c7855f4f6bb1347721f690d84d905c95f Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 1 Jul 2024 12:25:54 +0200 Subject: [PATCH 075/216] [Gradle] Rework sysprop setup for test tasks to be CC compatible (#110296) --- .../internal/ElasticsearchTestBasePlugin.java | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index d344b4694a5b5..689c8ddecb057 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -25,6 +25,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.FileCollection; import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.testing.Test; @@ -33,16 +34,21 @@ import java.util.List; import java.util.Map; +import javax.inject.Inject; + import static org.elasticsearch.gradle.util.FileUtils.mkdirs; import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure; /** * Applies commonly used settings to all Test tasks in the project */ -public class ElasticsearchTestBasePlugin implements Plugin { +public abstract class ElasticsearchTestBasePlugin implements Plugin { public static final String DUMP_OUTPUT_ON_FAILURE_PROP_NAME = "dumpOutputOnFailure"; + @Inject + protected abstract ProviderFactory getProviderFactory(); + @Override public void apply(Project project) { project.getPluginManager().apply(GradleTestPolicySetupPlugin.class); @@ -150,13 +156,11 @@ public void execute(Task t) { // we use 'temp' relative to CWD since this is per JVM and tests are forbidden from writing to CWD nonInputProperties.systemProperty("java.io.tmpdir", test.getWorkingDir().toPath().resolve("temp")); + test.systemProperties(getProviderFactory().systemPropertiesPrefixedBy("tests.").get()); + test.systemProperties(getProviderFactory().systemPropertiesPrefixedBy("es.").get()); + // TODO: remove setting logging level via system property 
test.systemProperty("tests.logger.level", "WARN"); - System.getProperties().entrySet().forEach(entry -> { - if ((entry.getKey().toString().startsWith("tests.") || entry.getKey().toString().startsWith("es."))) { - test.systemProperty(entry.getKey().toString(), entry.getValue()); - } - }); // TODO: remove this once ctx isn't added to update script params in 7.0 test.systemProperty("es.scripting.update.ctx_in_params", "false"); From 3726186bcd1a180f1d5c82a27f6e5f52d783572b Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Mon, 1 Jul 2024 13:00:36 +0200 Subject: [PATCH 076/216] ESQL: in tests, make only left joins, even with the new contructor (#110284) Fix https://github.com/elastic/elasticsearch/issues/110272 Fix https://github.com/elastic/elasticsearch/issues/110292 Fix https://github.com/elastic/elasticsearch/issues/110293 We recently introduced a new constructor for `Join`s that takes the `JoinConfig` components directly. In the `EsqlNodeSubclassTests`, we need to make sure that this constructor only receives `JoinType.LEFT` as the other types are forbidden for now. --- muted-tests.yml | 11 ++++------- .../esql/core/tree/NodeSubclassTests.java | 19 ++++++++++++++++++- .../esql/tree/EsqlNodeSubclassTests.java | 14 ++------------ 3 files changed, 24 insertions(+), 20 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 6d3d060a51bfd..4d447bcab7dcb 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -85,9 +85,6 @@ tests: - class: org.elasticsearch.compute.lucene.ValueSourceReaderTypeConversionTests method: testLoadAll issue: https://github.com/elastic/elasticsearch/issues/110244 -- class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests - method: testReplaceChildren {class org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec} - issue: https://github.com/elastic/elasticsearch/issues/110272 - class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not supported} issue: https://github.com/elastic/elasticsearch/issues/110290 @@ -107,11 +104,11 @@ tests: method: testMinVersionAsOldVersion issue: https://github.com/elastic/elasticsearch/issues/109454 - class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests - method: testInfoParameters {class org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec} - issue: https://github.com/elastic/elasticsearch/issues/110292 + method: testReplaceChildren {class org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial} + issue: https://github.com/elastic/elasticsearch/issues/110310 - class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests - method: testInfoParameters {class org.elasticsearch.xpack.esql.plan.physical.ExchangeExec} - issue: https://github.com/elastic/elasticsearch/issues/110293 + method: testInfoParameters {class org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial} + issue: https://github.com/elastic/elasticsearch/issues/110310 # Examples: # diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java index 80f63b1293e61..8d424f8694b97 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java @@ -52,6 +52,7 @@ import java.util.Objects; import java.util.Set; import 
java.util.function.Predicate; +import java.util.function.Supplier; import java.util.jar.JarEntry; import java.util.jar.JarInputStream; @@ -127,7 +128,12 @@ public void testTransform() throws Exception { Object originalArgValue = nodeCtorArgs[changedArgOffset]; Type changedArgType = argTypes[changedArgOffset]; - Object changedArgValue = randomValueOtherThan(nodeCtorArgs[changedArgOffset], () -> makeArg(changedArgType)); + Object changedArgValue = randomValueOtherThanMaxTries( + nodeCtorArgs[changedArgOffset], + () -> makeArg(changedArgType), + // JoinType has only 1 permitted enum element. Limit the number of retries. + 3 + ); B transformed = node.transformNodeProps(Object.class, prop -> Objects.equals(prop, originalArgValue) ? changedArgValue : prop); @@ -708,4 +714,15 @@ protected static Class testClassFor(Class nodeSubclass) { return null; } } + + private static T randomValueOtherThanManyMaxTries(Predicate input, Supplier randomSupplier, int maxTries) { + int[] maxTriesHolder = { maxTries }; + Predicate inputWithMaxTries = t -> input.test(t) && maxTriesHolder[0]-- > 0; + + return ESTestCase.randomValueOtherThanMany(inputWithMaxTries, randomSupplier); + } + + public static T randomValueOtherThanMaxTries(T input, Supplier randomSupplier, int maxTries) { + return randomValueOtherThanManyMaxTries(v -> Objects.equals(input, v), randomSupplier, maxTries); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index af09b61adaf45..adacc80ea12d2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -9,11 +9,9 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.compute.data.Page; import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -32,7 +30,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Grok; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; @@ -50,9 +47,7 @@ import java.util.function.Consumer; import java.util.function.Predicate; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/110272") public class EsqlNodeSubclassTests> extends NodeSubclassTests { - private static final List> CLASSES_WITH_MIN_TWO_CHILDREN = List.of(Concat.class, CIDRMatch.class); // List of classes that are "unresolved" NamedExpression subclasses, therefore not suitable for use with logical/physical plan nodes. 
@@ -88,13 +83,8 @@ protected Object pluggableMakeArg(Class> toBuildClass, Class (Attribute) makeArg(Attribute.class)),
-                randomList(0, 10, () -> (Attribute) makeArg(Attribute.class)),
-                randomList(0, 10, () -> (Attribute) makeArg(Attribute.class))
-            );
+        } else if (argClass == JoinType.class) {
+            return JoinType.LEFT;
         }

         return null;

From 6accd6e247363b2418fc113bdb5f1d36f455181d Mon Sep 17 00:00:00 2001
From: Tim Grein
Date: Mon, 1 Jul 2024 13:37:30 +0200
Subject: [PATCH 077/216] [Inference API] Fix wording in delete-inference docs (#110321)

---
 docs/reference/inference/delete-inference.asciidoc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc
index dca800c98ca2e..c4fcb3b7f8379 100644
--- a/docs/reference/inference/delete-inference.asciidoc
+++ b/docs/reference/inference/delete-inference.asciidoc
@@ -50,11 +50,11 @@ The type of {infer} task that the model performs.
 `dry_run`::
 (Optional, Boolean)
 When `true`, checks the {infer} processors that reference the endpoint and
-returns them in a list, but does not deletes the endpoint. Defaults to `false`.
+returns them in a list, but does not delete the endpoint. Defaults to `false`.

 `force`::
 (Optional, Boolean)
-Deletes the endpoint regardless if it's used in an {infer} pipeline or a in a
+Deletes the endpoint regardless if it's used in an {infer} pipeline or in a
 `semantic_text` field.

From f3cad241ae9b01d231055600d1bda0e96afcb8d9 Mon Sep 17 00:00:00 2001
From: Rene Groeschke
Date: Mon, 1 Jul 2024 13:57:05 +0200
Subject: [PATCH 078/216] [Gradle] Cleanup build libs version catalogue and its usage (#110298)

* [Gradle] Cleanup build libs version catalogue and its usage

* Snake yaml version in version.properties still used
---
 build-tools-internal/build.gradle | 7 ++-----
 gradle/build.versions.toml        | 2 ++
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle
index 84e56bbaf03ad..a8d1110ff4736 100644
--- a/build-tools-internal/build.gradle
+++ b/build-tools-internal/build.gradle
@@ -274,10 +274,7 @@ dependencies {
     // ensuring brought asm version brought in by spock is up-to-date
     testImplementation buildLibs.asm
     integTestImplementation buildLibs.asm
-    integTestImplementation('org.ow2.asm:asm:9.6')
-    api("org.yaml:snakeyaml") {
-      version { strictly(versions.snakeyaml) }
-    }
+    api(buildLibs.snakeyaml)
   }
   // Forcefully downgrade the jackson platform as used in production
   api enforcedPlatform(buildLibs.jackson.platform)
@@ -314,7 +311,7 @@ dependencies {
   compileOnly buildLibs.checkstyle
   compileOnly buildLibs.reflections

-  implementation 'com.github.javaparser:javaparser-core:3.18.0'
+  implementation buildLibs.javaparser

   runtimeOnly "org.elasticsearch.gradle:reaper:$version"
   testImplementation buildLibs.checkstyle
diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml
index 5a32d2e0a58cd..792330fd3613b 100644
--- a/gradle/build.versions.toml
+++ b/gradle/build.versions.toml
@@ -22,6 +22,7 @@ hamcrest = "org.hamcrest:hamcrest:2.1"
 httpcore = "org.apache.httpcomponents:httpcore:4.4.12"
 httpclient = "org.apache.httpcomponents:httpclient:4.5.14"
 idea-ext = "gradle.plugin.org.jetbrains.gradle.plugin.idea-ext:gradle-idea-ext:1.1.4"
+javaparser = "com.github.javaparser:javaparser-core:3.18.0"
 json-schema-validator = "com.networknt:json-schema-validator:1.0.72"
 json-assert = "org.skyscreamer:jsonassert:1.5.0"
 jackson-core = { group =
"com.fasterxml.jackson.core", name="jackson-core", version.ref="jackson" } @@ -39,6 +40,7 @@ mockito-core = "org.mockito:mockito-core:1.9.5" nebula-info = "com.netflix.nebula:gradle-info-plugin:11.3.3" reflections = "org.reflections:reflections:0.9.12" shadow-plugin = "com.github.breskeby:shadow:3b035f2" +snakeyaml = { group = "org.yaml", name = "snakeyaml", version = { strictly = "2.0" } } spock-core = { group = "org.spockframework", name="spock-core", version.ref="spock" } spock-junit4 = { group = "org.spockframework", name="spock-junit4", version.ref="spock" } spock-platform = { group = "org.spockframework", name="spock-bom", version.ref="spock" } From e3caeed2b62b0f5f189f52a751855502f6752306 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 1 Jul 2024 15:00:15 +0300 Subject: [PATCH 079/216] Fix sort on nested test (#110331) * Add test for nested array, fix sort on nested test. * Fix sort on nested test. --- .../rest-api-spec/test/indices.sort/20_nested.yml | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml index 35e8a270ca6a2..c88d638199dba 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.sort/20_nested.yml @@ -30,8 +30,6 @@ sort doc with nested object: - '{ "create": { } }' - '{ "name": "cccc", "nested_field": {"a": 3, "b": 4}, "nested_array": [{ "a": 30, "b": 40 }, { "a": 300, "b": 400 }], "other": { "value": "C"} }' - '{ "create": { } }' - - '{ "nested_field": {"a": 7, "b": 8}, "nested_array": [{ "a": 70, "b": 80 }, { "a": 700, "b": 800 }], "other": { "value": "D"} }' - - '{ "create": { } }' - '{ "name": "bbbb", "nested_field": {"a": 5, "b": 6}, "nested_array": [{ "a": 50, "b": 60 }, { "a": 500, "b": 600 }], "other": { "value": "B"} }' - do: @@ -39,7 +37,7 @@ sort doc with nested object: index: test sort: name - - match: { hits.total.value: 4 } + - match: { hits.total.value: 3 } - match: { hits.hits.0._source.name: aaaa } - match: { hits.hits.0._source.nested_field.a: 1 } - match: { hits.hits.0._source.nested_field.b: 2 } @@ -64,14 +62,6 @@ sort doc with nested object: - match: { hits.hits.2._source.nested_array.1.a: 300 } - match: { hits.hits.2._source.nested_array.1.b: 400 } - match: { hits.hits.2._source.other.value: C } - - is_false: hits.hits.3._source.name - - match: { hits.hits.3._source.nested_field.a: 7 } - - match: { hits.hits.3._source.nested_field.b: 8 } - - match: { hits.hits.3._source.nested_array.0.a: 70 } - - match: { hits.hits.3._source.nested_array.0.b: 80 } - - match: { hits.hits.3._source.nested_array.1.a: 700 } - - match: { hits.hits.3._source.nested_array.1.b: 800 } - - match: { hits.hits.3._source.other.value: D } --- From 6b38bfb6122738c20c0c093c5110c0acd502e8ae Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 1 Jul 2024 13:31:35 +0100 Subject: [PATCH 080/216] [ML] Set default similarity for all Cohere models and element types to Dot Product (#110254) --- .../services/cohere/CohereService.java | 28 +++++-------------- .../services/cohere/CohereServiceTests.java | 5 +--- 2 files changed, 8 insertions(+), 25 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
index 76ef15568d448..dec46817be7be 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java
@@ -33,7 +33,6 @@ import org.elasticsearch.xpack.inference.services.ServiceComponents;
 import org.elasticsearch.xpack.inference.services.ServiceUtils;
 import org.elasticsearch.xpack.inference.services.cohere.completion.CohereCompletionModel;
-import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType;
 import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel;
 import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettings;
 import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel;
@@ -280,10 +279,8 @@ public void checkModelConfig(Model model, ActionListener listener) {
     }

     private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsModel model, int embeddingSize) {
-        var similarityFromModel = model.getServiceSettings().similarity();
-        var similarityToUse = similarityFromModel == null
-            ? defaultSimilarity(model.getServiceSettings().getEmbeddingType())
-            : similarityFromModel;
+        var userDefinedSimilarity = model.getServiceSettings().similarity();
+        var similarityToUse = userDefinedSimilarity == null ? defaultSimilarity() : userDefinedSimilarity;

         CohereEmbeddingsServiceSettings serviceSettings = new CohereEmbeddingsServiceSettings(
             new CohereServiceSettings(
@@ -302,25 +299,14 @@ private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsMo

     /**
      * Return the default similarity measure for the embedding type.
-     * Cohere embeddings are normalized to unit vectors so Dot Product
-     * can be used. However, Elasticsearch rejects the byte vectors with
-     * Dot Product similarity complaining they are not normalized so
-     * Cosine is used for bytes.
-     * TODO investigate why the byte vectors are not normalized.
+     * Cohere embeddings are normalized to unit vectors, therefore Dot
+     * Product similarity can be used and is the default for all Cohere
+     * models.
      *
-     * @param embeddingType The embedding type (can be null)
     * @return The default similarity.
*/ - static SimilarityMeasure defaultSimilarity(@Nullable CohereEmbeddingType embeddingType) { - if (embeddingType == null) { - return SimilarityMeasure.DOT_PRODUCT; - } - - return switch (embeddingType) { - case FLOAT -> SimilarityMeasure.DOT_PRODUCT; - case BYTE -> SimilarityMeasure.COSINE; - case INT8 -> SimilarityMeasure.COSINE; - }; + static SimilarityMeasure defaultSimilarity() { + return SimilarityMeasure.DOT_PRODUCT; } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index e28ca71c30ff8..174bb4dfed109 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1344,10 +1344,7 @@ public void testChunkedInfer_BatchesCalls_Bytes() throws IOException { } public void testDefaultSimilarity() { - assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity(null)); - assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity(CohereEmbeddingType.FLOAT)); - assertEquals(SimilarityMeasure.COSINE, CohereService.defaultSimilarity(CohereEmbeddingType.INT8)); - assertEquals(SimilarityMeasure.COSINE, CohereService.defaultSimilarity(CohereEmbeddingType.BYTE)); + assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity()); } private Map getRequestConfigMap( From 99749aa277be693bdc6a8dc012b9cb9dbbab496d Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 1 Jul 2024 14:37:56 +0200 Subject: [PATCH 081/216] [Inference API] Fix wording in Azure AI Studio docs (#110322) --- docs/reference/inference/service-azure-ai-studio.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/inference/service-azure-ai-studio.asciidoc b/docs/reference/inference/service-azure-ai-studio.asciidoc index b8f84b47d68a3..0d711a0d6171f 100644 --- a/docs/reference/inference/service-azure-ai-studio.asciidoc +++ b/docs/reference/inference/service-azure-ai-studio.asciidoc @@ -35,7 +35,7 @@ Available task types: `service`:: (Required, string) -The type of service supported for the specified task type. In this case, +The type of service supported for the specified task type. In this case, `azureaistudio`. `service_settings`:: @@ -102,7 +102,7 @@ include::inference-shared.asciidoc[tag=task-settings] `do_sample`::: (Optional, float) Instructs the inference process to perform sampling or not. -Has not affect unless `temperature` or `top_p` is specified. +Has no effect unless `temperature` or `top_p` is specified. `max_new_tokens`::: (Optional, integer) @@ -170,4 +170,4 @@ PUT _inference/completion/azure_ai_studio_completion ------------------------------------------------------------ // TEST[skip:TBD] -The list of chat completion models that you can choose from in your deployment can be found in the https://ai.azure.com/explore/models?selectedTask=chat-completion[Azure AI Studio model explorer]. \ No newline at end of file +The list of chat completion models that you can choose from in your deployment can be found in the https://ai.azure.com/explore/models?selectedTask=chat-completion[Azure AI Studio model explorer]. 
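A short aside on the Cohere default-similarity patch (#110254) above: its new Javadoc leans on the fact that, for unit-normalized vectors, cosine similarity and dot product are the same number, because the normalizing denominator |a| * |b| is exactly 1. Below is a minimal, self-contained Java sketch of that equivalence; it is illustrative only and not taken from any patch in this series.

public final class UnitVectorSimilarity {

    // Plain dot product of two equal-length vectors.
    static double dot(float[] a, float[] b) {
        double sum = 0;
        for (int i = 0; i < a.length; i++) {
            sum += a[i] * b[i];
        }
        return sum;
    }

    // Cosine similarity: dot product divided by the product of the norms.
    static double cosine(float[] a, float[] b) {
        return dot(a, b) / (Math.sqrt(dot(a, a)) * Math.sqrt(dot(b, b)));
    }

    public static void main(String[] args) {
        float[] a = { 0.6f, 0.8f }; // |a| == 1
        float[] b = { 0.0f, 1.0f }; // |b| == 1
        System.out.println(dot(a, b));    // ~0.8
        System.out.println(cosine(a, b)); // same value: cosine == dot for unit vectors
    }
}

That equivalence is why a single DOT_PRODUCT default is safe once the embeddings are known to be normalized.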
From 9935212f6ad9822ff4969934dcacb8193528760b Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 1 Jul 2024 14:48:03 +0200 Subject: [PATCH 082/216] Some small improvements around routing BulkRequest (#109911) We don't need to do the expensive lookup of the index abstraction, we already have it. Also, we can skip the empty string check mostly. --- .../action/bulk/BulkOperation.java | 4 +-- .../action/bulk/TransportBulkAction.java | 10 ++---- .../cluster/routing/IndexRouting.java | 9 +++-- .../action/bulk/TransportBulkActionTests.java | 33 ++++++++++++------- 4 files changed, 30 insertions(+), 26 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index b9f753189c077..258e5b4c9a58d 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -294,8 +294,8 @@ private Map> groupRequestsByShards( ia = concreteIndices.resolveIfAbsent(docWriteRequest); indexOperationValidator.accept(ia, docWriteRequest); - TransportBulkAction.prohibitCustomRoutingOnDataStream(docWriteRequest, metadata); - TransportBulkAction.prohibitAppendWritesInBackingIndices(docWriteRequest, metadata); + TransportBulkAction.prohibitCustomRoutingOnDataStream(docWriteRequest, ia); + TransportBulkAction.prohibitAppendWritesInBackingIndices(docWriteRequest, ia); docWriteRequest.routing(metadata.resolveWriteIndexRouting(docWriteRequest.routing(), docWriteRequest.index())); final Index concreteIndex = docWriteRequest.getConcreteWriteIndex(ia, metadata); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 02a374044f864..d9d5bc92a24d1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -417,13 +417,12 @@ private static void failRequestsWhenPrerequisiteActionFailed( } } - static void prohibitAppendWritesInBackingIndices(DocWriteRequest writeRequest, Metadata metadata) { + static void prohibitAppendWritesInBackingIndices(DocWriteRequest writeRequest, IndexAbstraction indexAbstraction) { DocWriteRequest.OpType opType = writeRequest.opType(); if ((opType == OpType.CREATE || opType == OpType.INDEX) == false) { // op type not create or index, then bail early return; } - IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(writeRequest.index()); if (indexAbstraction == null) { return; } @@ -452,9 +451,7 @@ static void prohibitAppendWritesInBackingIndices(DocWriteRequest writeRequest + "] instead" ); } - if (opType == DocWriteRequest.OpType.INDEX - && writeRequest.ifPrimaryTerm() == UNASSIGNED_PRIMARY_TERM - && writeRequest.ifSeqNo() == UNASSIGNED_SEQ_NO) { + if (writeRequest.ifPrimaryTerm() == UNASSIGNED_PRIMARY_TERM && writeRequest.ifSeqNo() == UNASSIGNED_SEQ_NO) { throw new IllegalArgumentException( "index request with op_type=index and no if_primary_term and if_seq_no set " + "targeting backing indices is disallowed, target corresponding data stream [" @@ -464,8 +461,7 @@ static void prohibitAppendWritesInBackingIndices(DocWriteRequest writeRequest } } - static void prohibitCustomRoutingOnDataStream(DocWriteRequest writeRequest, Metadata metadata) { - IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(writeRequest.index()); + static void 
prohibitCustomRoutingOnDataStream(DocWriteRequest writeRequest, IndexAbstraction indexAbstraction) { if (indexAbstraction == null) { return; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java index fb2fcf1a02ad0..6f15eb5f6e49d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java @@ -151,13 +151,12 @@ private abstract static class IdAndRoutingOnly extends IndexRouting { @Override public void process(IndexRequest indexRequest) { - if ("".equals(indexRequest.id())) { - throw new IllegalArgumentException("if _id is specified it must not be empty"); - } - // generate id if not already provided - if (indexRequest.id() == null) { + final String id = indexRequest.id(); + if (id == null) { indexRequest.autoGenerateId(); + } else if (id.isEmpty()) { + throw new IllegalArgumentException("if _id is specified it must not be empty"); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 776174b7cf502..ca1d1ac49832e 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -199,7 +199,10 @@ public void testProhibitAppendWritesInBackingIndices() throws Exception { IndexRequest invalidRequest1 = new IndexRequest(backingIndexName).opType(DocWriteRequest.OpType.CREATE); Exception e = expectThrows( IllegalArgumentException.class, - () -> TransportBulkAction.prohibitAppendWritesInBackingIndices(invalidRequest1, metadata) + () -> TransportBulkAction.prohibitAppendWritesInBackingIndices( + invalidRequest1, + metadata.getIndicesLookup().get(invalidRequest1.index()) + ) ); assertThat( e.getMessage(), @@ -213,7 +216,10 @@ public void testProhibitAppendWritesInBackingIndices() throws Exception { IndexRequest invalidRequest2 = new IndexRequest(backingIndexName).opType(DocWriteRequest.OpType.INDEX); e = expectThrows( IllegalArgumentException.class, - () -> TransportBulkAction.prohibitAppendWritesInBackingIndices(invalidRequest2, metadata) + () -> TransportBulkAction.prohibitAppendWritesInBackingIndices( + invalidRequest2, + metadata.getIndicesLookup().get(invalidRequest2.index()) + ) ); assertThat( e.getMessage(), @@ -227,28 +233,28 @@ public void testProhibitAppendWritesInBackingIndices() throws Exception { DocWriteRequest validRequest = new IndexRequest(backingIndexName).opType(DocWriteRequest.OpType.INDEX) .setIfSeqNo(1) .setIfPrimaryTerm(1); - TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata); + TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index())); validRequest = new DeleteRequest(backingIndexName); - TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata); + TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index())); validRequest = new UpdateRequest(backingIndexName, "_id"); - TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata); + TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index())); // Testing append only write via ds name validRequest = new 
IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE);
-        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata);
+        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index()));

         validRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.INDEX);
-        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata);
+        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index()));

         // Append only for a backing index that doesn't exist is allowed:
         validRequest = new IndexRequest(DataStream.getDefaultBackingIndexName("logs-barbaz", 1)).opType(DocWriteRequest.OpType.CREATE);
-        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata);
+        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index()));

         // Some other index names:
         validRequest = new IndexRequest("my-index").opType(DocWriteRequest.OpType.CREATE);
-        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata);
+        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index()));

         validRequest = new IndexRequest("foobar").opType(DocWriteRequest.OpType.CREATE);
-        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata);
+        TransportBulkAction.prohibitAppendWritesInBackingIndices(validRequest, metadata.getIndicesLookup().get(validRequest.index()));
     }

     public void testProhibitCustomRoutingOnDataStream() throws Exception {
@@ -261,7 +267,10 @@ public void testProhibitCustomRoutingOnDataStream() throws Exception {
             .routing("custom");
         IllegalArgumentException exception = expectThrows(
             IllegalArgumentException.class,
-            () -> prohibitCustomRoutingOnDataStream(writeRequestAgainstDataStream, metadata)
+            () -> prohibitCustomRoutingOnDataStream(
+                writeRequestAgainstDataStream,
+                metadata.getIndicesLookup().get(writeRequestAgainstDataStream.index())
+            )
         );
         assertThat(
             exception.getMessage(),
@@ -275,7 +284,7 @@ public void testProhibitCustomRoutingOnDataStream() throws Exception {
         DocWriteRequest writeRequestAgainstIndex = new IndexRequest(DataStream.getDefaultBackingIndexName(dataStreamName, 1L)).opType(
             DocWriteRequest.OpType.INDEX
         ).routing("custom");
-        prohibitCustomRoutingOnDataStream(writeRequestAgainstIndex, metadata);
+        prohibitCustomRoutingOnDataStream(writeRequestAgainstIndex, metadata.getIndicesLookup().get(writeRequestAgainstIndex.index()));
     }

     public void testOnlySystem() {

From 99280b41e4b494882916f2175b03516ffd8e12a9 Mon Sep 17 00:00:00 2001
From: Artem Prigoda
Date: Mon, 1 Jul 2024 15:01:30 +0200
Subject: [PATCH 083/216] [test] Increase timeout for PrevalidateShardPathIT#testCheckShards (#110267)

Increase the default timeout for deleting shards to 30s to get more diagnostics in figuring out why the shard doesn't get removed from the node where it was moved from.
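The change below replaces the single-argument assertBusy call, which retries with ESTestCase's default 10-second budget, with the overload that takes an explicit 30-second budget. As a rough, self-contained sketch of that busy-assertion pattern (the real helper lives in ESTestCase; the retry and backoff schedule below is this sketch's own assumption):

import java.util.concurrent.TimeUnit;

public class AssertBusySketch {

    // Simplified stand-in for ESTestCase.assertBusy(CheckedRunnable, long, TimeUnit):
    // re-run the assertion until it passes or the time budget is spent.
    static void assertBusy(Runnable assertion, long maxWait, TimeUnit unit) throws InterruptedException {
        long deadline = System.nanoTime() + unit.toNanos(maxWait);
        long sleepMillis = 1;
        while (true) {
            try {
                assertion.run();
                return; // assertion passed
            } catch (AssertionError e) {
                if (System.nanoTime() >= deadline) {
                    throw e; // budget exhausted, surface the last failure
                }
                Thread.sleep(sleepMillis);
                sleepMillis = Math.min(sleepMillis * 2, 1_000); // backoff is an assumption of this sketch
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        // Passes once roughly a second has elapsed, well within the 30s budget.
        assertBusy(() -> {
            if (System.currentTimeMillis() - start < 1_000) {
                throw new AssertionError("shard path still present");
            }
        }, 30, TimeUnit.SECONDS);
        System.out.println("condition met within the budget");
    }
}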
See #104807, #110129
---
 .../cluster/PrevalidateShardPathIT.java | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java
index 3ff7e66d25639..c7d1d49f6e451 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateShardPathIT.java
@@ -22,11 +22,12 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.junit.annotations.TestLogging;
+import org.elasticsearch.test.junit.annotations.TestIssueLogging;

 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;

 import static org.hamcrest.Matchers.equalTo;
@@ -40,9 +41,12 @@
 @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
 public class PrevalidateShardPathIT extends ESIntegTestCase {

-    @TestLogging(
-        value = "org.elasticsearch.cluster.service.MasterService:DEBUG",
-        reason = "https://github.com/elastic/elasticsearch/issues/104807"
+    @TestIssueLogging(
+        value = "org.elasticsearch.cluster.service.MasterService:DEBUG, "
+            + "org.elasticsearch.indices.store.IndicesStore:TRACE,"
+            + "org.elasticsearch.indices.cluster.IndicesClusterStateService:DEBUG,"
+            + "org.elasticsearch.indices.IndicesService:TRACE",
+        issueUrl = "https://github.com/elastic/elasticsearch/issues/104807"
     )
     public void testCheckShards() throws Exception {
         internalCluster().startMasterOnlyNode();
@@ -130,6 +134,6 @@ public void testCheckShards() throws Exception {
                 );
                 throw e;
             }
-        });
+        }, 30, TimeUnit.SECONDS);
     }
 }

From 10e2cc3c117142dace38054ecc52eeb87896c310 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Mon, 1 Jul 2024 15:10:02 +0200
Subject: [PATCH 084/216] Dry up Bucket types in o.e.search.aggregations.bucket.histogram (#110303)

It's in the title: lots of duplication here that deserves cleanup in isolation.

Also, bucket instances are a perpetual source of memory consumption in aggs.
There are lots of possible improvements we can make to reduce their footprint;
drying up this code enables cleaner PRs for these improvements.
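The hunks below only show call sites and subclasses being updated; the newly created AbstractHistogramBucket itself falls outside this excerpt. As a rough sketch of the pull-up it performs (shared bucket state moving into one abstract base class), with the field set inferred from the bucket subclasses and therefore an assumption:

// Sketch of the extraction: every histogram bucket variant previously carried
// its own copy of this state; the refactoring hoists it into one base class.
abstract class AbstractBucketSketch {
    protected final long docCount;
    protected final Object aggregations; // stand-in for InternalAggregations
    protected final Object format;       // stand-in for DocValueFormat

    AbstractBucketSketch(long docCount, Object aggregations, Object format) {
        this.docCount = docCount;
        this.aggregations = aggregations;
        this.format = format;
    }

    final long docCount() {
        return docCount;
    }
}

// A concrete variant now only adds what is unique to it: its key representation.
final class DateBucketSketch extends AbstractBucketSketch {
    private final long key; // epoch millis for a date histogram bucket

    DateBucketSketch(long key, long docCount, Object aggregations, Object format) {
        super(docCount, aggregations, format);
        this.key = key;
    }
}

Keeping the shared fields in one place also gives the footprint improvements mentioned above a single spot to land in later PRs.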
--- .../pipeline/DateDerivativeIT.java | 14 ++-- .../aggregations/pipeline/SerialDiffIT.java | 8 +- .../histogram/InternalAutoDateHistogram.java | 39 +++------- .../InternalAutoDateHistogramTests.java | 2 +- .../indices/IndicesRequestCacheIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 68 ++++++++--------- .../aggregations/bucket/HistogramIT.java | 76 +++++++++---------- .../metrics/ScriptedMetricIT.java | 4 +- .../pipeline/ExtendedStatsBucketIT.java | 4 +- .../histogram/AbstractHistogramBucket.java | 43 +++++++++++ .../bucket/histogram/Histogram.java | 15 ---- .../histogram/InternalDateHistogram.java | 33 ++------ .../bucket/histogram/InternalHistogram.java | 33 ++------ .../InternalVariableWidthHistogram.java | 39 +++------- .../aggregation/AggregationTestUtils.java | 7 +- .../AggregationToJsonProcessorTests.java | 12 +-- 16 files changed, 172 insertions(+), 227 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java index ce7e4c63dc69c..c306e0fbcba06 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java @@ -15,9 +15,9 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.aggregations.support.AggregationPath; @@ -127,7 +127,7 @@ public void testSingleValuedField() throws Exception { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -171,7 +171,7 @@ public void testSingleValuedFieldNormalised() throws Exception { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -383,7 +383,7 @@ private static void addNTimes(int amount, String index, ZonedDateTime dateTime, } private static void assertBucket( - Histogram.Bucket bucket, + Bucket bucket, ZonedDateTime expectedKey, long expectedDocCount, Matcher derivativeMatcher, @@ -421,7 +421,7 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { Object[] propertiesCounts = (Object[]) ((InternalAggregation) histo).getProperty("sum.value"); ZonedDateTime key = 
ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -500,7 +500,7 @@ public void testMultiValuedField() throws Exception { assertThat(buckets.size(), equalTo(4)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -574,7 +574,7 @@ public void testPartiallyUnmapped() throws Exception { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(1L)); diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java index 7cbb298f49931..430f71879cb78 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java @@ -12,8 +12,8 @@ import org.elasticsearch.aggregations.AggregationIntegTestCase; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; @@ -91,7 +91,7 @@ private void assertValidIterators(Iterator expectedBucketIter, Iterator ex } } - private void assertBucketContents(Histogram.Bucket actual, Double expectedCount, Double expectedValue) { + private void assertBucketContents(Bucket actual, Double expectedCount, Double expectedValue) { // This is a gap bucket SimpleValue countDiff = actual.getAggregations().get("diff_counts"); if (expectedCount == null) { @@ -239,7 +239,7 @@ public void testBasicDiff() { List expectedCounts = testValues.get(MetricTarget.COUNT.toString()); List expectedValues = testValues.get(MetricTarget.VALUE.toString()); - Iterator actualIter = buckets.iterator(); + Iterator actualIter = buckets.iterator(); Iterator expectedBucketIter = mockHisto.iterator(); Iterator expectedCountsIter = expectedCounts.iterator(); Iterator expectedValuesIter = expectedValues.iterator(); @@ -247,7 +247,7 @@ public void testBasicDiff() { while (actualIter.hasNext()) { assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter); - Histogram.Bucket actual = actualIter.next(); + Bucket actual = actualIter.next(); PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next(); Double expectedCount = expectedCountsIter.next(); Double expectedValue = expectedValuesIter.next(); diff --git 
a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index cfaf4b77a07be..1e3042f8cf1e4 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.histogram.AbstractHistogramBucket; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory; @@ -48,28 +49,20 @@ public final class InternalAutoDateHistogram extends InternalMultiBucketAggregat InternalAutoDateHistogram, InternalAutoDateHistogram.Bucket> implements Histogram, HistogramFactory { - public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { + public static class Bucket extends AbstractHistogramBucket implements KeyComparable { final long key; - final long docCount; - final InternalAggregations aggregations; - protected final transient DocValueFormat format; public Bucket(long key, long docCount, DocValueFormat format, InternalAggregations aggregations) { - this.format = format; + super(docCount, aggregations, format); this.key = key; - this.docCount = docCount; - this.aggregations = aggregations; } /** * Read from a stream. 
*/ - public Bucket(StreamInput in, DocValueFormat format) throws IOException { - this.format = format; - key = in.readLong(); - docCount = in.readVLong(); - aggregations = InternalAggregations.readFrom(in); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + return new Bucket(in.readLong(), in.readVLong(), format, InternalAggregations.readFrom(in)); } @Override @@ -105,16 +98,6 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public long getDocCount() { - return docCount; - } - - @Override - public InternalAggregations getAggregations() { - return aggregations; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { String keyAsString = format.format(key).toString(); @@ -222,7 +205,7 @@ public InternalAutoDateHistogram(StreamInput in) throws IOException { super(in); bucketInfo = new BucketInfo(in); format = in.readNamedWriteable(DocValueFormat.class); - buckets = in.readCollectionAsList(stream -> new Bucket(stream, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); this.targetBuckets = in.readVInt(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_3_0)) { bucketInnerInterval = in.readVLong(); @@ -286,7 +269,7 @@ public InternalAutoDateHistogram create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); + return new Bucket(prototype.key, prototype.getDocCount(), prototype.getFormatter(), aggregations); } /** @@ -376,14 +359,14 @@ private List mergeBuckets( long roundedBucketKey = reduceRounding.round(bucket.key); if (Double.isNaN(key)) { key = roundedBucketKey; - sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + sameKeyedBuckets.add(createBucket(key, bucket.getDocCount(), bucket.getAggregations())); } else if (roundedBucketKey == key) { - sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + sameKeyedBuckets.add(createBucket(key, bucket.getDocCount(), bucket.getAggregations())); } else { mergedBuckets.add(reduceBucket(sameKeyedBuckets, reduceContext)); sameKeyedBuckets.clear(); key = roundedBucketKey; - sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + sameKeyedBuckets.add(createBucket(key, bucket.getDocCount(), bucket.getAggregations())); } } if (sameKeyedBuckets.isEmpty() == false) { @@ -594,7 +577,7 @@ private BucketReduceResult mergeConsecutiveBuckets( sameKeyedBuckets.clear(); key = current.preparedRounding.round(bucket.key); } - sameKeyedBuckets.add(new Bucket(Math.round(key), bucket.docCount, format, bucket.aggregations)); + sameKeyedBuckets.add(new Bucket(Math.round(key), bucket.getDocCount(), format, bucket.getAggregations())); } if (sameKeyedBuckets.isEmpty() == false) { mergedBuckets.add(reduceBucket(sameKeyedBuckets, reduceContext)); diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index e7af9f5745d6d..9b72f30542c54 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ 
b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -501,7 +501,7 @@ public void testReadFromPre830() throws IOException { assertEquals(1, deserialized.getBuckets().size()); InternalAutoDateHistogram.Bucket bucket = deserialized.getBuckets().iterator().next(); assertEquals(10, bucket.key); - assertEquals(100, bucket.docCount); + assertEquals(100, bucket.getDocCount()); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 62e6cb59994b2..08ce9af14ab13 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -16,10 +16,10 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.cache.request.RequestCacheStats; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index a9ff9f15a7e92..efb283f047bb2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -24,9 +24,9 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds; import org.elasticsearch.search.aggregations.metrics.Avg; @@ -241,7 +241,7 @@ public void testSingleValuedField() throws Exception { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -283,7 +283,7 @@ public void testSingleValuedFieldWithTimeZone() throws Exception { assertThat(buckets.size(), equalTo(6)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); 
assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -354,7 +354,7 @@ public void testSingleValued_timeZone_epoch() throws Exception { expectedKeys.add(ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC)); Iterator keyIterator = expectedKeys.iterator(); - for (Histogram.Bucket bucket : buckets) { + for (Bucket bucket : buckets) { assertThat(bucket, notNullValue()); ZonedDateTime expectedKey = keyIterator.next(); String bucketKey = bucket.getKeyAsString(); @@ -380,7 +380,7 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { assertThat(buckets.size(), equalTo(3)); int i = 0; - for (Histogram.Bucket bucket : buckets) { + for (Bucket bucket : buckets) { assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i++; } @@ -400,7 +400,7 @@ public void testSingleValuedFieldOrderedByKeyDesc() throws Exception { assertThat(histo.getBuckets().size(), equalTo(3)); int i = 2; - for (Histogram.Bucket bucket : histo.getBuckets()) { + for (Bucket bucket : histo.getBuckets()) { assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } @@ -420,7 +420,7 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception { assertThat(histo.getBuckets().size(), equalTo(3)); int i = 0; - for (Histogram.Bucket bucket : histo.getBuckets()) { + for (Bucket bucket : histo.getBuckets()) { assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i++; } @@ -440,7 +440,7 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception { assertThat(histo.getBuckets().size(), equalTo(3)); int i = 2; - for (Histogram.Bucket bucket : histo.getBuckets()) { + for (Bucket bucket : histo.getBuckets()) { assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } @@ -465,7 +465,7 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { Object[] propertiesCounts = (Object[]) ((InternalAggregation) histo).getProperty("sum.value"); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -521,7 +521,7 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { assertThat(histo.getBuckets().size(), equalTo(3)); int i = 0; - for (Histogram.Bucket bucket : histo.getBuckets()) { + for (Bucket bucket : histo.getBuckets()) { assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i++; } @@ -544,7 +544,7 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception assertThat(histo.getBuckets().size(), equalTo(3)); int i = 2; - for (Histogram.Bucket bucket : histo.getBuckets()) { + for (Bucket bucket : histo.getBuckets()) { assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } @@ -567,7 +567,7 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws assertThat(histo.getBuckets().size(), equalTo(3)); int i = 2; - for (Histogram.Bucket bucket 
: histo.getBuckets()) { + for (Bucket bucket : histo.getBuckets()) { assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC))); i--; } @@ -590,7 +590,7 @@ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { assertThat(histo.getBuckets().size(), equalTo(3)); int i = 1; - for (Histogram.Bucket bucket : histo.getBuckets()) { + for (Bucket bucket : histo.getBuckets()) { assertThat(bucket.getKey(), equalTo(date(i, 1))); i++; } @@ -645,7 +645,7 @@ public void testSingleValuedFieldWithValueScript() throws Exception { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -688,7 +688,7 @@ public void testMultiValuedField() throws Exception { assertThat(buckets.size(), equalTo(4)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -729,9 +729,9 @@ public void testMultiValuedFieldOrderedByCountDesc() throws Exception { assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(4)); - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKey(), equalTo(date(3, 1))); assertThat(bucket.getDocCount(), equalTo(5L)); @@ -781,7 +781,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { assertThat(buckets.size(), equalTo(4)); ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -835,7 +835,7 @@ public void testScriptSingleValue() throws Exception { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -874,7 +874,7 @@ public void testScriptMultiValued() throws Exception { assertThat(buckets.size(), equalTo(4)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -940,7 +940,7 @@ public void testPartiallyUnmapped() throws Exception { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, 
notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -976,10 +976,10 @@ public void testEmptyAggregation() throws Exception { assertThat(response.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); - List buckets = histo.getBuckets(); + List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(3)); - Histogram.Bucket bucket = buckets.get(1); + Bucket bucket = buckets.get(1); assertThat(bucket, Matchers.notNullValue()); assertThat(bucket.getKeyAsString(), equalTo("1.0")); @@ -1013,10 +1013,10 @@ public void testSingleValueWithTimeZone() throws Exception { assertThat(response.getHits().getTotalHits().value, equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); - List buckets = histo.getBuckets(); + List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(2)); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo("2014-03-10:00-00-00-02:00")); assertThat(bucket.getDocCount(), equalTo(2L)); @@ -1118,7 +1118,7 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { ZonedDateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey; for (int i = 0; i < bucketsCount; i++) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); assertThat(bucket.getKeyAsString(), equalTo(format(key, pattern))); @@ -1185,7 +1185,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { assertThat(buckets.size(), equalTo(24)); for (int i = 0; i < buckets.size(); i++) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); ZonedDateTime zonedDateTime = timeZoneStartToday.plus(i * 60 * 60 * 1000, ChronoUnit.MILLIS); assertThat("InternalBucket " + i + " had wrong key", (ZonedDateTime) bucket.getKey(), equalTo(zonedDateTime)); @@ -1283,11 +1283,11 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception assertSearchHits(response, "0", "1", "2", "3", "4"); Histogram histo = response.getAggregations().get("date_histo"); - List buckets = histo.getBuckets(); + List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(1)); ZonedDateTime key = ZonedDateTime.of(2014, 3, 10, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -1315,7 +1315,7 @@ public void testIssue6965() { assertThat(buckets.size(), equalTo(3)); ZonedDateTime key = ZonedDateTime.of(2011, 12, 31, 23, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz))); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key)); @@ -1683,7 +1683,7 @@ private void assertMultiSortResponse(int[] expectedDays, BucketOrder... 
order) { assertThat(histogram.getBuckets().size(), equalTo(expectedKeys.length)); int i = 0; - for (Histogram.Bucket bucket : histogram.getBuckets()) { + for (Bucket bucket : histogram.getBuckets()) { assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo(expectedKeys[i])); assertThat(bucket.getDocCount(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("_count"))); @@ -1699,7 +1699,7 @@ private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) { ); } - private ZonedDateTime key(Histogram.Bucket bucket) { + private ZonedDateTime key(Bucket bucket) { return (ZonedDateTime) bucket.getKey(); } @@ -1753,7 +1753,7 @@ public void testDateKeyFormatting() { ), response -> { InternalDateHistogram histogram = response.getAggregations().get("histo"); - List buckets = histogram.getBuckets(); + List buckets = histogram.getBuckets(); assertThat(buckets.get(0).getKeyAsString(), equalTo("2012-01-01T00:00:00.000-07:00")); assertThat(buckets.get(1).getKeyAsString(), equalTo("2012-02-01T00:00:00.000-07:00")); assertThat(buckets.get(2).getKeyAsString(), equalTo("2012-03-01T00:00:00.000-07:00")); @@ -1770,7 +1770,7 @@ public void testHardBoundsOnDates() { ), response -> { InternalDateHistogram histogram = response.getAggregations().get("histo"); - List buckets = histogram.getBuckets(); + List buckets = histogram.getBuckets(); assertThat(buckets.size(), equalTo(30)); assertThat(buckets.get(1).getKeyAsString(), equalTo("2012-02-03T00:00:00.000Z")); assertThat(buckets.get(29).getKeyAsString(), equalTo("2012-03-02T00:00:00.000Z")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 421c1475eb5bc..5894837e257bf 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -19,10 +19,10 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.DoubleBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Stats; @@ -252,7 +252,7 @@ public void testSingleValuedField() throws Exception { assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); @@ -276,7 +276,7 @@ public void singleValuedField_withOffset() throws Exception { assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets)); // first bucket should start at -5, contain 4 documents - Histogram.Bucket bucket = histo.getBuckets().get(0); + Bucket bucket = histo.getBuckets().get(0); 
assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(-5L)); assertThat(bucket.getDocCount(), equalTo(4L)); @@ -310,7 +310,7 @@ public void testSingleValuedFieldWithRandomOffset() throws Exception { long docsCounted = 0; for (int i = 0; i < expectedNumberOfBuckets; ++i) { - Histogram.Bucket bucket = histo.getBuckets().get(i); + Bucket bucket = histo.getBuckets().get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) ((i - 1) * interval + offset))); if (i == 0) { @@ -340,9 +340,9 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); @@ -362,9 +362,9 @@ public void testsingleValuedFieldOrderedByKeyDesc() throws Exception { assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(numValueBuckets - i - 1); + Bucket bucket = buckets.get(numValueBuckets - i - 1); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); @@ -385,10 +385,10 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception { assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); Set buckets = new HashSet<>(); - List histoBuckets = new ArrayList<>(histo.getBuckets()); + List histoBuckets = new ArrayList<>(histo.getBuckets()); long previousCount = Long.MIN_VALUE; for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = histoBuckets.get(i); + Bucket bucket = histoBuckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertEquals(0, key % interval); @@ -413,10 +413,10 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception { assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); Set buckets = new HashSet<>(); - List histoBuckets = new ArrayList<>(histo.getBuckets()); + List histoBuckets = new ArrayList<>(histo.getBuckets()); long previousCount = Long.MAX_VALUE; for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = histoBuckets.get(i); + Bucket bucket = histoBuckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertEquals(0, key % interval); @@ -446,9 +446,9 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) histo).getProperty("_count"); Object[] propertiesCounts = (Object[]) ((InternalAggregation) histo).getProperty("sum.value"); - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, 
notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); @@ -486,9 +486,9 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { Set visited = new HashSet<>(); double previousSum = Double.NEGATIVE_INFINITY; - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); @@ -527,9 +527,9 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception Set visited = new HashSet<>(); double previousSum = Double.POSITIVE_INFINITY; - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); @@ -569,9 +569,9 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws Set visited = new HashSet<>(); double previousSum = Double.POSITIVE_INFINITY; - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); @@ -611,9 +611,9 @@ public void testSingleValuedFieldOrderedBySubAggregationDescDeepOrderPath() thro Set visited = new HashSet<>(); double prevMax = asc ? 
Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); long key = ((Number) bucket.getKey()).longValue(); assertTrue(visited.add(key)); @@ -646,9 +646,9 @@ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); @@ -705,7 +705,7 @@ public void testSingleValuedFieldWithValueScript() throws Exception { assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); @@ -726,7 +726,7 @@ public void testMultiValuedField() throws Exception { assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); @@ -746,9 +746,9 @@ public void testMultiValuedFieldOrderedByKeyDesc() throws Exception { assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets)); - List buckets = new ArrayList<>(histo.getBuckets()); + List buckets = new ArrayList<>(histo.getBuckets()); for (int i = 0; i < numValuesBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(numValuesBuckets - i - 1); + Bucket bucket = buckets.get(numValuesBuckets - i - 1); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); @@ -783,7 +783,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); @@ -807,7 +807,7 @@ public void testScriptSingleValue() throws Exception { assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); @@ -830,7 +830,7 @@ public void testScriptMultiValued() throws Exception { assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { - Histogram.Bucket bucket = 
buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); @@ -862,7 +862,7 @@ public void testPartiallyUnmapped() throws Exception { assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); @@ -885,7 +885,7 @@ public void testPartiallyUnmappedWithExtendedBounds() throws Exception { List buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets + 3)); - Histogram.Bucket bucket = buckets.get(0); + Bucket bucket = buckets.get(0); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) -1 * 2 * interval)); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -919,7 +919,7 @@ public void testEmptyAggregation() throws Exception { Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); - Histogram.Bucket bucket = buckets.get(1); + Bucket bucket = buckets.get(1); assertThat(bucket, Matchers.notNullValue()); histo = bucket.getAggregations().get("sub_histo"); @@ -984,7 +984,7 @@ public void testSingleValuedFieldWithExtendedBounds() throws Exception { long key = startKey; for (int i = 0; i < bucketsCount; i++) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i])); @@ -1058,7 +1058,7 @@ public void testEmptyWithExtendedBounds() throws Exception { long key = startKey; for (int i = 0; i < bucketsCount; i++) { - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(0L)); @@ -1302,7 +1302,7 @@ private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) assertThat(histogram.getBuckets().size(), equalTo(expectedKeys.length)); int i = 0; - for (Histogram.Bucket bucket : histogram.getBuckets()) { + for (Bucket bucket : histogram.getBuckets()) { assertThat(bucket, notNullValue()); assertThat(key(bucket), equalTo(expectedKeys[i])); assertThat(bucket.getDocCount(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("_count"))); @@ -1318,7 +1318,7 @@ private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... 
order) ); } - private long key(Histogram.Bucket bucket) { + private long key(Bucket bucket) { return ((Number) bucket.getKey()).longValue(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 96f6002f0d490..eeee745b32f92 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -21,9 +21,9 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -1103,7 +1103,7 @@ public void testEmptyAggregation() throws Exception { assertThat(response.getHits().getTotalHits().value, equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); - Histogram.Bucket bucket = histo.getBuckets().get(1); + Bucket bucket = histo.getBuckets().get(1); assertThat(bucket, notNullValue()); ScriptedMetric scriptedMetric = bucket.getAggregations().get("scripted"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 421a5d2d36254..0da75854b9ab2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -12,8 +12,8 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds; import java.util.ArrayList; @@ -119,7 +119,7 @@ public void testGappyIndexWithSigma() { } else { expectedDocCount = 1; } - Histogram.Bucket bucket = buckets.get(i); + Bucket bucket = buckets.get(i); assertThat("i: " + i, bucket, notNullValue()); assertThat("i: " + i, ((Number) bucket.getKey()).longValue(), equalTo((long) i)); assertThat("i: " + i, bucket.getDocCount(), equalTo(expectedDocCount)); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java new file mode 100644 index 0000000000000..ef020cd55bfaf --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramBucket.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; + +/** + * A bucket in the histogram into which documents fall + */ +public abstract class AbstractHistogramBucket extends InternalMultiBucketAggregation.InternalBucket { + + protected final long docCount; + protected final InternalAggregations aggregations; + protected final transient DocValueFormat format; + + protected AbstractHistogramBucket(long docCount, InternalAggregations aggregations, DocValueFormat format) { + this.docCount = docCount; + this.aggregations = aggregations; + this.format = format; + } + + @Override + public final long getDocCount() { + return docCount; + } + + @Override + public final InternalAggregations getAggregations() { + return aggregations; + } + + public final DocValueFormat getFormatter() { + return format; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/Histogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/Histogram.java index 834d3c10016cb..7d06e4ad583fc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/Histogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/Histogram.java @@ -10,8 +10,6 @@ import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.xcontent.ParseField; -import java.util.List; - /** * A {@code histogram} aggregation. Defines multiple buckets, each representing an interval in a histogram.
*/ @@ -25,17 +23,4 @@ public interface Histogram extends MultiBucketsAggregation { ParseField EXTENDED_BOUNDS_FIELD = new ParseField("extended_bounds"); ParseField HARD_BOUNDS_FIELD = new ParseField("hard_bounds"); - /** - * A bucket in the histogram where documents fall in - */ - interface Bucket extends MultiBucketsAggregation.Bucket { - - } - - /** - * @return The buckets of this histogram (each bucket representing an interval in the histogram) - */ - @Override - List getBuckets(); - } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index e0de42cebcc7d..951ed222ffb77 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -49,31 +49,22 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< Histogram, HistogramFactory { - public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { + public static class Bucket extends AbstractHistogramBucket implements KeyComparable { final long key; - final long docCount; - final InternalAggregations aggregations; private final transient boolean keyed; - protected final transient DocValueFormat format; public Bucket(long key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { - this.format = format; + super(docCount, aggregations, format); this.keyed = keyed; this.key = key; - this.docCount = docCount; - this.aggregations = aggregations; } /** * Read from a stream. 
*/ - public Bucket(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - this.format = format; - this.keyed = keyed; - key = in.readLong(); - docCount = in.readVLong(); - aggregations = InternalAggregations.readFrom(in); + public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { + return new Bucket(in.readLong(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); } @Override @@ -109,16 +100,6 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public long getDocCount() { - return docCount; - } - - @Override - public InternalAggregations getAggregations() { - return aggregations; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { String keyAsString = format.format(key).toString(); @@ -142,10 +123,6 @@ public int compareKey(Bucket other) { return Long.compare(key, other.key); } - public DocValueFormat getFormatter() { - return format; - } - public boolean getKeyed() { return keyed; } @@ -259,7 +236,7 @@ public InternalDateHistogram(StreamInput in) throws IOException { } else { downsampledResultsOffset = false; } - buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { // list is mutable by #readCollectionAsList contract diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index b44d8fec4030a..33548aa96b27f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -44,31 +44,22 @@ public class InternalHistogram extends InternalMultiBucketAggregation { + public static class Bucket extends AbstractHistogramBucket implements KeyComparable { final double key; - final long docCount; - final InternalAggregations aggregations; private final transient boolean keyed; - protected final transient DocValueFormat format; public Bucket(double key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { - this.format = format; + super(docCount, aggregations, format); this.keyed = keyed; this.key = key; - this.docCount = docCount; - this.aggregations = aggregations; } /** * Read from a stream. 
*/ - public Bucket(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - this.format = format; - this.keyed = keyed; - key = in.readDouble(); - docCount = in.readVLong(); - aggregations = InternalAggregations.readFrom(in); + public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { + return new Bucket(in.readDouble(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); } @Override @@ -104,16 +95,6 @@ public Object getKey() { return key; } - @Override - public long getDocCount() { - return docCount; - } - - @Override - public InternalAggregations getAggregations() { - return aggregations; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { String keyAsString = format.format(key).toString(); @@ -137,10 +118,6 @@ public int compareKey(Bucket other) { return Double.compare(key, other.key); } - public DocValueFormat getFormatter() { - return format; - } - public boolean getKeyed() { return keyed; } @@ -242,7 +219,7 @@ public InternalHistogram(StreamInput in) throws IOException { } format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); - buckets = in.readCollectionAsList(stream -> new Bucket(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { // list is mutable by #readCollectionAsList contract diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 46b5a1b7629d8..675b5d218c882 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -37,7 +37,7 @@ public class InternalVariableWidthHistogram extends InternalMultiBucketAggregati InternalVariableWidthHistogram, InternalVariableWidthHistogram.Bucket> implements Histogram, HistogramFactory { - public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { + public static class Bucket extends AbstractHistogramBucket implements KeyComparable { public static class BucketBounds { public double min; @@ -72,28 +72,23 @@ public int hashCode() { } private final BucketBounds bounds; - private long docCount; - private InternalAggregations aggregations; - protected final transient DocValueFormat format; - private double centroid; + private final double centroid; public Bucket(double centroid, BucketBounds bounds, long docCount, DocValueFormat format, InternalAggregations aggregations) { - this.format = format; + super(docCount, aggregations, format); this.centroid = centroid; this.bounds = bounds; - this.docCount = docCount; - this.aggregations = aggregations; } /** * Read from a stream. 
*/ - public Bucket(StreamInput in, DocValueFormat format) throws IOException { - this.format = format; - centroid = in.readDouble(); - docCount = in.readVLong(); - bounds = new BucketBounds(in); - aggregations = InternalAggregations.readFrom(in); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + final double centroid = in.readDouble(); + final long docCount = in.readVLong(); + final BucketBounds bounds = new BucketBounds(in); + final InternalAggregations aggregations = InternalAggregations.readFrom(in); + return new Bucket(centroid, bounds, docCount, format, aggregations); } @Override @@ -123,7 +118,7 @@ public int hashCode() { @Override public String getKeyAsString() { - return format.format((double) getKey()).toString(); + return format.format(centroid).toString(); } /** @@ -148,16 +143,6 @@ public double centroid() { return centroid; } - @Override - public long getDocCount() { - return docCount; - } - - @Override - public InternalAggregations getAggregations() { - return aggregations; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { String keyAsString = format.format((double) getKey()).toString(); @@ -231,7 +216,7 @@ public int hashCode() { } } - private List buckets; + private final List buckets; private final DocValueFormat format; private final int targetNumBuckets; final EmptyBucketInfo emptyBucketInfo; @@ -258,7 +243,7 @@ public InternalVariableWidthHistogram(StreamInput in) throws IOException { super(in); emptyBucketInfo = new EmptyBucketInfo(in); format = in.readNamedWriteable(DocValueFormat.class); - buckets = in.readCollectionAsList(stream -> new Bucket(stream, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); targetNumBuckets = in.readVInt(); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.HISTOGRAM_AGGS_KEY_SORTED)) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index afd3c3534f7c3..561076c302eda 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -36,12 +36,7 @@ public final class AggregationTestUtils { private AggregationTestUtils() {} static InternalHistogram.Bucket createHistogramBucket(long timestamp, long docCount, List subAggregations) { - InternalHistogram.Bucket bucket = mock(InternalHistogram.Bucket.class); - when(bucket.getKey()).thenReturn(timestamp); - when(bucket.getDocCount()).thenReturn(docCount); - InternalAggregations aggs = createAggs(subAggregations); - when(bucket.getAggregations()).thenReturn(aggs); - return bucket; + return new InternalHistogram.Bucket(timestamp, docCount, false, DocValueFormat.RAW, createAggs(subAggregations)); } static InternalComposite.InternalBucket createCompositeBucket( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java index 
a3edd63295cea..fc774a4ee3e48 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java @@ -414,11 +414,13 @@ public void testProcessGivenMultipleSingleMetricPerSingleTermsPerHistogram() thr } public void testProcessGivenUnsupportedAggregationUnderHistogram() { - InternalHistogram.Bucket histogramBucket = createHistogramBucket(1000L, 2); InternalAggregation anotherHistogram = mock(InternalAggregation.class); when(anotherHistogram.getName()).thenReturn("nested-agg"); - InternalAggregations subAggs = createAggs(Arrays.asList(createMax("time", 1000), anotherHistogram)); - when(histogramBucket.getAggregations()).thenReturn(subAggs); + InternalHistogram.Bucket histogramBucket = createHistogramBucket( + 1000L, + 2, + Arrays.asList(createMax("time", 1000), anotherHistogram) + ); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -428,13 +430,11 @@ public void testProcessGivenUnsupportedAggregationUnderHistogram() { } public void testProcessGivenMultipleBucketAggregations() { - InternalHistogram.Bucket histogramBucket = createHistogramBucket(1000L, 2); StringTerms terms1 = mock(StringTerms.class); when(terms1.getName()).thenReturn("terms_1"); StringTerms terms2 = mock(StringTerms.class); when(terms2.getName()).thenReturn("terms_2"); - InternalAggregations subAggs = createAggs(Arrays.asList(createMax("time", 1000), terms1, terms2)); - when(histogramBucket.getAggregations()).thenReturn(subAggs); + InternalHistogram.Bucket histogramBucket = createHistogramBucket(1000L, 2, Arrays.asList(createMax("time", 1000), terms1, terms2)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, From 258a8b5a8efcba347e112cc3dcfe0819e238143d Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 1 Jul 2024 15:27:03 +0200 Subject: [PATCH 085/216] semantic_text: Add exists query (#110027) --- .../index/query/NestedQueryBuilder.java | 23 ++- .../mapper/SemanticTextFieldMapper.java | 16 ++ .../mapper/SemanticTextFieldMapperTests.java | 79 ++++++---- .../70_semantic_text_exists_query.yml | 144 ++++++++++++++++++ 4 files changed, 232 insertions(+), 30 deletions(-) create mode 100644 x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_semantic_text_exists_query.yml diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 3a96fdc9b0e0e..a6a3d8546187f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.index.search.ESToParentBlockJoinQuery; import org.elasticsearch.index.search.NestedHelper; @@ -260,6 +261,26 @@ protected int doHashCode() { @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { + return toQuery((this.query::toQuery), path, scoreMode, ignoreUnmapped, context); + } + + /** + * Returns the primitive Lucene 
query for a nested query given the primitive query to wrap + * @param <E> exception that the queryProvider may throw + * @param queryProvider Retrieves the query to use given the SearchExecutionContext + * @param path nested path + * @param scoreMode score mode to use + * @param ignoreUnmapped whether to ignore unmapped fields + * @param context search execution context + * @return the primitive Lucene query + */ + public static <E extends Exception> Query toQuery( + CheckedFunction<SearchExecutionContext, Query, E> queryProvider, + String path, + ScoreMode scoreMode, + boolean ignoreUnmapped, + SearchExecutionContext context + ) throws E { if (context.allowExpensiveQueries() == false) { throw new ElasticsearchException( "[joining] queries cannot be executed when '" + ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false." @@ -285,7 +306,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { try { context.nestedScope().nextLevel(mapper); - innerQuery = this.query.toQuery(context); + innerQuery = queryProvider.apply(context); } finally { context.nestedScope().previousLevel(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 84b41bf37db56..3a62428f237bc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.mapper; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ScoreMode; @@ -352,6 +353,21 @@ public Query termQuery(Object value, SearchExecutionContext context) { throw new IllegalArgumentException(CONTENT_TYPE + " fields do not support term query"); } + @Override + public Query existsQuery(SearchExecutionContext context) { + if (getEmbeddingsField() == null) { + return new MatchNoDocsQuery(); + } + + return NestedQueryBuilder.toQuery( + (c -> getEmbeddingsField().fieldType().existsQuery(c)), + getChunksFieldName(name()), + ScoreMode.None, + false, + context + ); + } + @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { // Redirect the fetcher to load the original values of the field diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 14de5ceffa6d4..1cae8d981313f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; @@ -42,6 +43,7 @@ import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.index.mapper.vectors.SparseVectorFieldMapper;
+import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.search.ESToParentBlockJoinQuery; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.SimilarityMeasure; @@ -180,36 +182,10 @@ public void testDynamicUpdate() throws IOException { final String fieldName = "semantic"; final String inferenceId = "test_service"; - MapperService mapperService = createMapperService(mapping(b -> {})); - mapperService.merge( - "_doc", - new CompressedXContent( - Strings.toString(PutMappingRequest.simpleMapping(fieldName, "type=semantic_text,inference_id=" + inferenceId)) - ), - MapperService.MergeReason.MAPPING_UPDATE - ); - - SemanticTextField semanticTextField = new SemanticTextField( + MapperService mapperService = mapperServiceForFieldWithModelSettings( fieldName, - List.of(), - new SemanticTextField.InferenceResult( - inferenceId, - new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null), - List.of() - ), - XContentType.JSON - ); - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - builder.field(semanticTextField.fieldName()); - builder.value(semanticTextField); - builder.endObject(); - - SourceToParse sourceToParse = new SourceToParse("test", BytesReference.bytes(builder), XContentType.JSON); - ParsedDocument parsedDocument = mapperService.documentMapper().parse(sourceToParse); - mapperService.merge( - "_doc", - parsedDocument.dynamicMappingsUpdate().toCompressedXContent(), - MapperService.MergeReason.MAPPING_UPDATE + inferenceId, + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); assertSemanticTextField(mapperService, fieldName, true); } @@ -565,6 +541,51 @@ private MapperService mapperServiceForFieldWithModelSettings( return mapperService; } + public void testExistsQuerySparseVector() throws IOException { + final String fieldName = "semantic"; + final String inferenceId = "test_service"; + + MapperService mapperService = mapperServiceForFieldWithModelSettings( + fieldName, + inferenceId, + new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) + ); + + Mapper mapper = mapperService.mappingLookup().getMapper(fieldName); + assertNotNull(mapper); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); + Query existsQuery = ((SemanticTextFieldMapper) mapper).fieldType().existsQuery(searchExecutionContext); + assertThat(existsQuery, instanceOf(ESToParentBlockJoinQuery.class)); + } + + public void testExistsQueryDenseVector() throws IOException { + final String fieldName = "semantic"; + final String inferenceId = "test_service"; + + MapperService mapperService = mapperServiceForFieldWithModelSettings( + fieldName, + inferenceId, + new SemanticTextField.ModelSettings( + TaskType.TEXT_EMBEDDING, + 1024, + SimilarityMeasure.COSINE, + DenseVectorFieldMapper.ElementType.FLOAT + ) + ); + + Mapper mapper = mapperService.mappingLookup().getMapper(fieldName); + assertNotNull(mapper); + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); + Query existsQuery = ((SemanticTextFieldMapper) mapper).fieldType().existsQuery(searchExecutionContext); + assertThat(existsQuery, instanceOf(ESToParentBlockJoinQuery.class)); + } + + @Override + protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { + // Until a doc is indexed, the query is rewritten as match no docs + assertThat(query, instanceOf(MatchNoDocsQuery.class)); + } + private static 
void addSemanticTextMapping(XContentBuilder mappingBuilder, String fieldName, String modelId) throws IOException { mappingBuilder.startObject(fieldName); mappingBuilder.field("type", SemanticTextFieldMapper.CONTENT_TYPE); diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_semantic_text_exists_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_semantic_text_exists_query.yml new file mode 100644 index 0000000000000..11bd1f87aab06 --- /dev/null +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_semantic_text_exists_query.yml @@ -0,0 +1,144 @@ +setup: + - requires: + cluster_features: "gte_v8.15.0" + reason: semantic_text introduced in 8.15.0 + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "api_key": "abc64", + "similarity": "COSINE" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-sparse-index + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + + - do: + indices.create: + index: test-dense-index + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + +--- +"Exists query with no indexed documents": + - do: + search: + index: test-sparse-index + body: + query: + exists: + field: "inference_field" + + - match: { hits.total.value: 0 } + +--- +"Exists query with null indexed documents": + - do: + index: + index: test-sparse-index + id: doc + body: + inference_field: null + refresh: true + + - do: + search: + index: test-sparse-index + body: + query: + exists: + field: "inference_field" + + - match: { hits.total.value: 0 } + + - do: + index: + index: test-dense-index + id: doc + body: + inference_field: null + refresh: true + + - do: + search: + index: test-dense-index + body: + query: + exists: + field: "inference_field" + + - match: { hits.total.value: 0 } + +--- +"Exists query with indexed documents": + - do: + index: + index: test-sparse-index + id: doc + body: + inference_field: "hello world" + refresh: true + + - do: + search: + index: test-sparse-index + body: + query: + exists: + field: "inference_field" + + - match: { hits.total.value: 1 } + + - do: + index: + index: test-dense-index + id: doc + body: + inference_field: "hello world" + refresh: true + + - do: + search: + index: test-dense-index + body: + query: + exists: + field: "inference_field" + + - match: { hits.total.value: 1 } From bddc2e47d57c94e3535e7496c31e40b00aaace31 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 1 Jul 2024 15:38:15 +0200 Subject: [PATCH 086/216] [Inference API] Add Google Vertex AI Rerank support (#110273) --- .../org/elasticsearch/TransportVersions.java | 1 + .../InferenceNamedWriteablesProvider.java | 18 ++ .../GoogleVertexAiActionCreator.java | 6 + .../GoogleVertexAiActionVisitor.java | 2 + .../GoogleVertexAiRerankAction.java | 56 ++++++ ...oogleVertexAiEmbeddingsRequestManager.java | 10 +- .../sender/GoogleVertexAiRequestManager.java | 14 +- .../GoogleVertexAiRerankRequestManager.java | 70 ++++++++ .../GoogleVertexAiRerankRequest.java | 
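Notes:
    The rerank support talks to the global Discovery Engine ranking API rather than the
    regional -aiplatform.googleapis.com prediction host that the embeddings model uses.
    Below is a minimal sketch of where requests go and what the serialized body looks
    like; the project id "my-project" and the input strings are placeholders for
    illustration, not part of this patch:

        import java.net.URI;

        import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel;

        public class RerankEndpointSketch {
            public static void main(String[] args) throws Exception {
                // Rerank requests always target the global default ranking config.
                URI uri = GoogleVertexAiRerankModel.buildUri("my-project");
                System.out.println(uri);
                // prints:
                // https://discoveryengine.googleapis.com/v1/projects/my-project/locations/global/rankingConfigs/default_ranking_config:rank

                // GoogleVertexAiRerankRequestEntity serializes a request body of the shape:
                // {
                //   "model": "...",          (only when a model id is configured)
                //   "query": "some query",
                //   "records": [
                //     { "id": "0", "content": "first input" },
                //     { "id": "1", "content": "second input" }
                //   ],
                //   "topN": 2                (only when topN is configured)
                // }
            }
        }

    Rate limiting for rerank is grouped by project id (its RateLimitGrouping hashes
    rateLimitServiceSettings().projectId()), while embeddings keep grouping by model id.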
83 +++++++++ .../GoogleVertexAiRerankRequestEntity.java | 68 ++++++++ .../googlevertexai/GoogleVertexAiUtils.java | 10 ++ .../external/response/XContentUtils.java | 27 +++ .../GoogleVertexAiRerankResponseEntity.java | 116 +++++++++++++ ...oogleVertexAiRateLimitServiceSettings.java | 2 - .../googlevertexai/GoogleVertexAiService.java | 24 ++- .../GoogleVertexAiEmbeddingsModel.java | 5 + ...xAiEmbeddingsRateLimitServiceSettings.java | 15 ++ ...ogleVertexAiEmbeddingsServiceSettings.java | 3 +- ...scoveryEngineRateLimitServiceSettings.java | 14 ++ .../rerank/GoogleVertexAiRerankModel.java | 141 +++++++++++++++ ...ogleVertexAiRerankRequestTaskSettings.java | 43 +++++ .../GoogleVertexAiRerankServiceSettings.java | 149 ++++++++++++++++ .../GoogleVertexAiRerankTaskSettings.java | 105 +++++++++++ .../GoogleVertexAiRerankActionTests.java | 118 +++++++++++++ ...oogleVertexAiRerankRequestEntityTests.java | 115 ++++++++++++ .../GoogleVertexAiRerankRequestTests.java | 122 +++++++++++++ .../external/response/XContentUtilsTests.java | 57 ++++++ ...ogleVertexAiRerankResponseEntityTests.java | 164 ++++++++++++++++++ .../GoogleVertexAiServiceTests.java | 69 ++++++++ .../GoogleVertexAiRerankModelTests.java | 64 +++++++ ...ertexAiRerankRequestTaskSettingsTests.java | 36 ++++ ...gleVertexAiRerankServiceSettingsTests.java | 129 ++++++++++++++ ...GoogleVertexAiRerankTaskSettingsTests.java | 137 +++++++++++++++ 33 files changed, 1975 insertions(+), 18 deletions(-) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRerankRequestManager.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleDiscoveryEngineRateLimitServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankActionTests.java create mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModelTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index ae70bb7cbf0a8..22094b7ec8bac 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -206,6 +206,7 @@ static TransportVersion def(int id) { public static final TransportVersion SECURITY_MIGRATIONS_MIGRATION_NEEDED_ADDED = def(8_697_00_0); public static final TransportVersion K_FOR_KNN_QUERY_ADDED = def(8_698_00_0); public static final TransportVersion TEXT_SIMILARITY_RERANKER_RETRIEVER = def(8_699_00_0); + public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED = def(8_700_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index b75c44731df06..f3799b824fc0e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -53,6 +53,8 @@ import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiSecretSettings; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsTaskSettings; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankServiceSettings; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankTaskSettings; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import org.elasticsearch.xpack.inference.services.mistral.embeddings.MistralEmbeddingsServiceSettings; @@ -314,6 +316,22 @@ private static void addGoogleVertexAiNamedWriteables(List namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionCreator.java index 32254432d3ee2..ed2a205151a4c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionCreator.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; import java.util.Map; import java.util.Objects; @@ -30,4 +31,9 @@ public GoogleVertexAiActionCreator(Sender sender, ServiceComponents serviceCompo public ExecutableAction create(GoogleVertexAiEmbeddingsModel model, Map taskSettings) { return new GoogleVertexAiEmbeddingsAction(sender, model, serviceComponents); } + + @Override + public ExecutableAction create(GoogleVertexAiRerankModel model, Map taskSettings) { + return new GoogleVertexAiRerankAction(sender, model, serviceComponents.threadPool()); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionVisitor.java index 8d885749fee09..def8f09ce06be 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionVisitor.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiActionVisitor.java @@ -9,6 +9,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; import java.util.Map; @@ -16,4 +17,5 @@ public interface GoogleVertexAiActionVisitor { ExecutableAction create(GoogleVertexAiEmbeddingsModel model, Map taskSettings); + ExecutableAction create(GoogleVertexAiRerankModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankAction.java new file mode 100644 index 0000000000000..2827de3b1962d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankAction.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.googlevertexai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.GoogleVertexAiRerankRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class GoogleVertexAiRerankAction implements ExecutableAction { + + private final String failedToSendRequestErrorMessage; + + private final Sender sender; + + private final GoogleVertexAiRerankRequestManager requestManager; + + public GoogleVertexAiRerankAction(Sender sender, GoogleVertexAiRerankModel model, ThreadPool threadPool) { + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage(model.uri(), "Google Vertex AI rerank"); + this.requestManager = GoogleVertexAiRerankRequestManager.of(model, threadPool); + } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException( + failedToSendRequestErrorMessage, + listener + ); + sender.send(requestManager, inferenceInputs, timeout, wrappedListener); + } catch 
(ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, failedToSendRequestErrorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java index c79e1a088ad5f..7a9fcff2dc276 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java @@ -41,11 +41,19 @@ private static ResponseHandler createEmbeddingsHandler() { private final Truncator truncator; public GoogleVertexAiEmbeddingsRequestManager(GoogleVertexAiEmbeddingsModel model, Truncator truncator, ThreadPool threadPool) { - super(threadPool, model); + super(threadPool, model, RateLimitGrouping.of(model)); this.model = Objects.requireNonNull(model); this.truncator = Objects.requireNonNull(truncator); } + record RateLimitGrouping(int modelIdHash) { + public static RateLimitGrouping of(GoogleVertexAiEmbeddingsModel model) { + Objects.requireNonNull(model); + + return new RateLimitGrouping(model.rateLimitServiceSettings().modelId().hashCode()); + } + } + @Override public void execute( String query, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRequestManager.java index 698bce3e337d6..b1baa21ab6a8d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRequestManager.java @@ -10,19 +10,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiModel; -import java.util.Objects; - public abstract class GoogleVertexAiRequestManager extends BaseRequestManager { - GoogleVertexAiRequestManager(ThreadPool threadPool, GoogleVertexAiModel model) { - super(threadPool, model.getInferenceEntityId(), RateLimitGrouping.of(model), model.rateLimitServiceSettings().rateLimitSettings()); - } - - record RateLimitGrouping(int modelIdHash) { - public static RateLimitGrouping of(GoogleVertexAiModel model) { - Objects.requireNonNull(model); - - return new RateLimitGrouping(model.rateLimitServiceSettings().modelId().hashCode()); - } + GoogleVertexAiRequestManager(ThreadPool threadPool, GoogleVertexAiModel model, Object rateLimitGroup) { + super(threadPool, model.getInferenceEntityId(), rateLimitGroup, model.rateLimitServiceSettings().rateLimitSettings()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRerankRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRerankRequestManager.java new file mode 100644 index 0000000000000..ab49ecc7ab9f9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiRerankRequestManager.java @@ -0,0 +1,70 
@@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.googlevertexai.GoogleVertexAiResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.googlevertexai.GoogleVertexAiRerankRequest; +import org.elasticsearch.xpack.inference.external.response.googlevertexai.GoogleVertexAiRerankResponseEntity; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class GoogleVertexAiRerankRequestManager extends GoogleVertexAiRequestManager { + + private static final Logger logger = LogManager.getLogger(GoogleVertexAiRerankRequestManager.class); + + private static final ResponseHandler HANDLER = createGoogleVertexAiResponseHandler(); + + private static ResponseHandler createGoogleVertexAiResponseHandler() { + return new GoogleVertexAiResponseHandler( + "Google Vertex AI rerank", + (request, response) -> GoogleVertexAiRerankResponseEntity.fromResponse(response) + ); + } + + public static GoogleVertexAiRerankRequestManager of(GoogleVertexAiRerankModel model, ThreadPool threadPool) { + return new GoogleVertexAiRerankRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); + } + + private final GoogleVertexAiRerankModel model; + + private GoogleVertexAiRerankRequestManager(GoogleVertexAiRerankModel model, ThreadPool threadPool) { + super(threadPool, model, RateLimitGrouping.of(model)); + this.model = model; + } + + record RateLimitGrouping(int projectIdHash) { + public static RateLimitGrouping of(GoogleVertexAiRerankModel model) { + Objects.requireNonNull(model); + + return new RateLimitGrouping(model.rateLimitServiceSettings().projectId().hashCode()); + } + } + + @Override + public void execute( + String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + ActionListener listener + ) { + GoogleVertexAiRerankRequest request = new GoogleVertexAiRerankRequest(query, input, model); + + execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequest.java new file mode 100644 index 0000000000000..79606c63e0ed6 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequest.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.googlevertexai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; + +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class GoogleVertexAiRerankRequest implements GoogleVertexAiRequest { + + private final GoogleVertexAiRerankModel model; + + private final String query; + + private final List input; + + public GoogleVertexAiRerankRequest(String query, List input, GoogleVertexAiRerankModel model) { + this.model = Objects.requireNonNull(model); + this.query = Objects.requireNonNull(query); + this.input = Objects.requireNonNull(input); + } + + @Override + public HttpRequest createHttpRequest() { + HttpPost httpPost = new HttpPost(model.uri()); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString( + new GoogleVertexAiRerankRequestEntity(query, input, model.getServiceSettings().modelId(), model.getTaskSettings().topN()) + ).getBytes(StandardCharsets.UTF_8) + ); + + httpPost.setEntity(byteEntity); + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + + decorateWithAuth(httpPost); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + public void decorateWithAuth(HttpPost httpPost) { + GoogleVertexAiRequest.decorateWithBearerToken(httpPost, model.getSecretSettings()); + } + + public GoogleVertexAiRerankModel model() { + return model; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public URI getURI() { + return model.uri(); + } + + @Override + public Request truncate() { + return this; + } + + @Override + public boolean[] getTruncationInfo() { + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntity.java new file mode 100644 index 0000000000000..2cac067f622cc --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntity.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.googlevertexai; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record GoogleVertexAiRerankRequestEntity(String query, List inputs, @Nullable String model, @Nullable Integer topN) + implements + ToXContentObject { + + private static final String MODEL_FIELD = "model"; + private static final String QUERY_FIELD = "query"; + private static final String RECORDS_FIELD = "records"; + private static final String ID_FIELD = "id"; + + private static final String CONTENT_FIELD = "content"; + private static final String TOP_N_FIELD = "topN"; + + public GoogleVertexAiRerankRequestEntity { + Objects.requireNonNull(query); + Objects.requireNonNull(inputs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (model != null) { + builder.field(MODEL_FIELD, model); + } + + builder.field(QUERY_FIELD, query); + + builder.startArray(RECORDS_FIELD); + + for (int recordId = 0; recordId < inputs.size(); recordId++) { + builder.startObject(); + + { + builder.field(ID_FIELD, String.valueOf(recordId)); + builder.field(CONTENT_FIELD, inputs.get(recordId)); + } + + builder.endObject(); + } + + builder.endArray(); + + if (topN != null) { + builder.field(TOP_N_FIELD, topN); + } + + builder.endObject(); + + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiUtils.java index 8258679bc6dfe..505676ff457bc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiUtils.java @@ -11,12 +11,20 @@ public final class GoogleVertexAiUtils { public static final String GOOGLE_VERTEX_AI_HOST_SUFFIX = "-aiplatform.googleapis.com"; + public static final String GOOGLE_DISCOVERY_ENGINE_HOST = "discoveryengine.googleapis.com"; + public static final String V1 = "v1"; public static final String PROJECTS = "projects"; public static final String LOCATIONS = "locations"; + public static final String GLOBAL = "global"; + + public static final String RANKING_CONFIGS = "rankingConfigs"; + + public static final String DEFAULT_RANKING_CONFIG = "default_ranking_config"; + public static final String PUBLISHERS = "publishers"; public static final String PUBLISHER_GOOGLE = "google"; @@ -25,6 +33,8 @@ public final class GoogleVertexAiUtils { public static final String PREDICT = "predict"; + public static final String RANK = "rank"; + private GoogleVertexAiUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index a4f48510bc0e6..e0ba93e52c903 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -53,6 
+53,33 @@ public static void positionParserAtTokenAfterField(XContentParser parser, String throw new IllegalStateException(format(errorMsgTemplate, field)); } + /** + * Iterates over the tokens until it finds a field name token with the text matching the field requested + * inside the current object (does not include nested objects). + * + * @param parser parser to move + * @param field the field name to find + * @param errorMsgTemplate a template message to populate an exception if the field cannot be found + * @throws IllegalStateException if the field cannot be found + */ + public static void positionParserAtTokenAfterFieldCurrentFlatObj(XContentParser parser, String field, String errorMsgTemplate) + throws IOException { + XContentParser.Token token = parser.nextToken(); + + while (token != null + && token != XContentParser.Token.END_OBJECT + && token != XContentParser.Token.START_OBJECT + && token != XContentParser.Token.START_ARRAY) { + if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { + parser.nextToken(); + return; + } + token = parser.nextToken(); + } + + throw new IllegalStateException(format(errorMsgTemplate, field)); + } + /** * Progress the parser consuming and discarding tokens until the * parser points to the end of the current object. Nested objects diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java new file mode 100644 index 0000000000000..cd37628e8e17e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.googlevertexai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.consumeUntilObjectEnd; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterFieldCurrentFlatObj; + +public class GoogleVertexAiRerankResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Google Vertex AI rerank response"; + + /** + * Parses the Google Vertex AI rerank response. + * + * For a request like: + * + *
+     * <pre>
+     *     <code>
    +     *         {
    +     *              "query": "some query",
    +     *              "records": [
    +     *                  {
    +     *                      "id": "1",
    +     *                      "title": "title 1",
    +     *                      "content": "content 1"
    +     *                  },
    +     *                  {
    +     *                      "id": "2",
    +     *                      "title": "title 2",
    +     *                      "content": "content 2"
    +     *                  }
    +     *     ]
    +     * }
+     *     </code>
+     * </pre>
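+     *
+     * (This plugin's request entity sends only {@code id} and {@code content} for each record, with
+     * the id taken from the record's position in the input list; {@code title} appears above only as
+     * part of the API's general record shape.)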
+     *
+     * The response would look like:
+     *
+     * <pre>
+     *     <code>
    +     *         {
    +     *              "records": [
    +     *                  {
    +     *                      "id": "2",
    +     *                      "title": "title 2",
    +     *                      "content": "content 2",
    +     *                      "score": 0.97
    +     *                  },
    +     *                  {
    +     *                      "id": "1",
    +     *                      "title": "title 1",
    +     *                      "content": "content 1",
    +     *                      "score": 0.18
    +     *                  }
    +     *             ]
    +     *         }
+     *     </code>
+     * </pre>
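+     *
+     * Only the {@code content} and {@code score} fields of each record are read back; any other
+     * fields (such as {@code id} and {@code title}) are skipped, and the ranked docs are returned in
+     * the order the records appear in the response.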
    + */ + + public static RankedDocsResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "records", FAILED_TO_FIND_FIELD_TEMPLATE); + + List rankedDocs = parseList(jsonParser, GoogleVertexAiRerankResponseEntity::parseRankedDoc); + + return new RankedDocsResults(rankedDocs); + } + } + + private static RankedDocsResults.RankedDoc parseRankedDoc(XContentParser parser, Integer index) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + + positionParserAtTokenAfterFieldCurrentFlatObj(parser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + XContentParser.Token token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_STRING, token, parser); + String content = parser.text(); + + positionParserAtTokenAfterFieldCurrentFlatObj(parser, "score", FAILED_TO_FIND_FIELD_TEMPLATE); + token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); + float score = parser.floatValue(); + + consumeUntilObjectEnd(parser); + + return new RankedDocsResults.RankedDoc(index, score, content); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiRateLimitServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiRateLimitServiceSettings.java index bd1373ae3ab8f..f7c7865a5a8f4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiRateLimitServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiRateLimitServiceSettings.java @@ -11,8 +11,6 @@ public interface GoogleVertexAiRateLimitServiceSettings { - String modelId(); - RateLimitSettings rateLimitSettings(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index 4708d5b7d5300..9b2ef5847322c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -27,12 +27,14 @@ import org.elasticsearch.xpack.inference.external.action.googlevertexai.GoogleVertexAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.QueryAndDocsInputs; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import 
org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsServiceSettings; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; import java.util.List; import java.util.Map; @@ -139,6 +141,8 @@ public void checkModelConfig(Model model, ActionListener listener) { this, listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size))) ); + } else { + listener.onResponse(model); } } @@ -174,7 +178,16 @@ protected void doInfer( TimeValue timeout, ActionListener listener ) { - throw new UnsupportedOperationException("Query input not supported for Google Vertex AI"); + if (model instanceof GoogleVertexAiModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } + + GoogleVertexAiModel googleVertexAiModel = (GoogleVertexAiModel) model; + var actionCreator = new GoogleVertexAiActionCreator(getSender(), getServiceComponents()); + + var action = googleVertexAiModel.accept(actionCreator, taskSettings); + action.execute(new QueryAndDocsInputs(query, input), timeout, listener); } @Override @@ -267,6 +280,15 @@ private static GoogleVertexAiModel createModel( secretSettings, context ); + case RERANK -> new GoogleVertexAiRerankModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModel.java index eb49e3f182a5e..99110045fc3da 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsModel.java @@ -110,6 +110,11 @@ public GoogleVertexAiSecretSettings getSecretSettings() { return (GoogleVertexAiSecretSettings) super.getSecretSettings(); } + @Override + public GoogleVertexAiEmbeddingsRateLimitServiceSettings rateLimitServiceSettings() { + return (GoogleVertexAiEmbeddingsRateLimitServiceSettings) super.rateLimitServiceSettings(); + } + public URI uri() { return uri; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java new file mode 100644 index 0000000000000..7e1e0056de2b5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.googlevertexai.embeddings; + +import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiRateLimitServiceSettings; + +public interface GoogleVertexAiEmbeddingsRateLimitServiceSettings extends GoogleVertexAiRateLimitServiceSettings { + + String modelId(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java index 5f037f4530999..ce7dc2726545f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java @@ -20,7 +20,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; -import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiRateLimitServiceSettings; import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiService; import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; @@ -43,7 +42,7 @@ public class GoogleVertexAiEmbeddingsServiceSettings extends FilteredXContentObject implements ServiceSettings, - GoogleVertexAiRateLimitServiceSettings { + GoogleVertexAiEmbeddingsRateLimitServiceSettings { public static final String NAME = "google_vertex_ai_embeddings_service_settings"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleDiscoveryEngineRateLimitServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleDiscoveryEngineRateLimitServiceSettings.java new file mode 100644 index 0000000000000..850942602248f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleDiscoveryEngineRateLimitServiceSettings.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.googlevertexai.rerank; + +import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiRateLimitServiceSettings; + +public interface GoogleDiscoveryEngineRateLimitServiceSettings extends GoogleVertexAiRateLimitServiceSettings { + String projectId(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModel.java new file mode 100644 index 0000000000000..45fad977a2b6b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModel.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.googlevertexai.rerank; + +import org.apache.http.client.utils.URIBuilder; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.googlevertexai.GoogleVertexAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.googlevertexai.GoogleVertexAiUtils; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiModel; +import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiSecretSettings; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Map; + +import static org.elasticsearch.core.Strings.format; + +public class GoogleVertexAiRerankModel extends GoogleVertexAiModel { + + private URI uri; + + public GoogleVertexAiRerankModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + Map secrets, + ConfigurationParseContext context + ) { + this( + inferenceEntityId, + taskType, + service, + GoogleVertexAiRerankServiceSettings.fromMap(serviceSettings, context), + GoogleVertexAiRerankTaskSettings.fromMap(taskSettings), + GoogleVertexAiSecretSettings.fromMap(secrets) + ); + } + + public GoogleVertexAiRerankModel(GoogleVertexAiRerankModel model, GoogleVertexAiRerankServiceSettings serviceSettings) { + super(model, serviceSettings); + } + + // Should only be used directly for testing + GoogleVertexAiRerankModel( + String inferenceEntityId, + TaskType taskType, + String service, + GoogleVertexAiRerankServiceSettings serviceSettings, + GoogleVertexAiRerankTaskSettings taskSettings, + @Nullable GoogleVertexAiSecretSettings secrets + ) { + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); + try { + this.uri = buildUri(serviceSettings.projectId()); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + // Should only be used directly for testing + protected GoogleVertexAiRerankModel( + String inferenceEntityId, + TaskType taskType, + String service, + String 
uri, + GoogleVertexAiRerankServiceSettings serviceSettings, + GoogleVertexAiRerankTaskSettings taskSettings, + @Nullable GoogleVertexAiSecretSettings secrets + ) { + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); + try { + this.uri = new URI(uri); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + @Override + public GoogleVertexAiRerankServiceSettings getServiceSettings() { + return (GoogleVertexAiRerankServiceSettings) super.getServiceSettings(); + } + + @Override + public GoogleVertexAiRerankTaskSettings getTaskSettings() { + return (GoogleVertexAiRerankTaskSettings) super.getTaskSettings(); + } + + @Override + public GoogleVertexAiSecretSettings getSecretSettings() { + return (GoogleVertexAiSecretSettings) super.getSecretSettings(); + } + + @Override + public GoogleDiscoveryEngineRateLimitServiceSettings rateLimitServiceSettings() { + return (GoogleDiscoveryEngineRateLimitServiceSettings) super.rateLimitServiceSettings(); + } + + public URI uri() { + return uri; + } + + @Override + public ExecutableAction accept(GoogleVertexAiActionVisitor visitor, Map taskSettings) { + return visitor.create(this, taskSettings); + } + + public static URI buildUri(String projectId) throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(GoogleVertexAiUtils.GOOGLE_DISCOVERY_ENGINE_HOST) + .setPathSegments( + GoogleVertexAiUtils.V1, + GoogleVertexAiUtils.PROJECTS, + projectId, + GoogleVertexAiUtils.LOCATIONS, + GoogleVertexAiUtils.GLOBAL, + GoogleVertexAiUtils.RANKING_CONFIGS, + format("%s:%s", GoogleVertexAiUtils.DEFAULT_RANKING_CONFIG, GoogleVertexAiUtils.RANK) + ) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettings.java new file mode 100644 index 0000000000000..5cb1acd8038f7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettings.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.services.googlevertexai.rerank;
+
+import org.elasticsearch.common.ValidationException;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.inference.ModelConfigurations;
+
+import java.util.Map;
+
+import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger;
+
+public record GoogleVertexAiRerankRequestTaskSettings(@Nullable Integer topN) {
+
+    public static final GoogleVertexAiRerankRequestTaskSettings EMPTY_SETTINGS = new GoogleVertexAiRerankRequestTaskSettings(null);
+
+    public static GoogleVertexAiRerankRequestTaskSettings fromMap(Map<String, Object> map) {
+        if (map.isEmpty()) {
+            return GoogleVertexAiRerankRequestTaskSettings.EMPTY_SETTINGS;
+        }
+
+        ValidationException validationException = new ValidationException();
+
+        Integer topN = extractOptionalPositiveInteger(
+            map,
+            GoogleVertexAiRerankTaskSettings.TOP_N,
+            ModelConfigurations.TASK_SETTINGS,
+            validationException
+        );
+
+        if (validationException.validationErrors().isEmpty() == false) {
+            throw validationException;
+        }
+
+        return new GoogleVertexAiRerankRequestTaskSettings(topN);
+    }
+
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java
new file mode 100644
index 0000000000000..0a0271d611a71
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettings.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.services.googlevertexai.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiService; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiServiceFields.PROJECT_ID; + +public class GoogleVertexAiRerankServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + GoogleDiscoveryEngineRateLimitServiceSettings { + + public static final String NAME = "google_vertex_ai_rerank_service_settings"; + + // Query requests per project per minute: https://cloud.google.com/generative-ai-app-builder/quotas#request_quotas + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(300); + + public static GoogleVertexAiRerankServiceSettings fromMap(Map map, ConfigurationParseContext context) { + ValidationException validationException = new ValidationException(); + + String projectId = extractRequiredString(map, PROJECT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String model = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + GoogleVertexAiService.NAME, + context + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new GoogleVertexAiRerankServiceSettings(projectId, model, rateLimitSettings); + } + + private final String projectId; + + private final String modelId; + + private final RateLimitSettings rateLimitSettings; + + public GoogleVertexAiRerankServiceSettings(String projectId, @Nullable String modelId, @Nullable RateLimitSettings rateLimitSettings) { + this.projectId = Objects.requireNonNull(projectId); + this.modelId = modelId; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public GoogleVertexAiRerankServiceSettings(StreamInput in) throws IOException { + this.projectId = in.readString(); + this.modelId = in.readOptionalString(); + this.rateLimitSettings = new RateLimitSettings(in); + } + + public String projectId() { + return projectId; + } + + public String modelId() { + return modelId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public 
TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(PROJECT_ID, projectId); + + if (modelId != null) { + builder.field(MODEL_ID, modelId); + } + + rateLimitSettings.toXContent(builder, params); + + return builder; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + + builder.endObject(); + + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(projectId); + out.writeOptionalString(modelId); + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + GoogleVertexAiRerankServiceSettings that = (GoogleVertexAiRerankServiceSettings) object; + return Objects.equals(projectId, that.projectId) + && Objects.equals(modelId, that.modelId) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(projectId, modelId, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java new file mode 100644 index 0000000000000..e7c20cc0b4fea --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettings.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.googlevertexai.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; + +public class GoogleVertexAiRerankTaskSettings implements TaskSettings { + + public static final String NAME = "google_vertex_ai_rerank_task_settings"; + + public static final String TOP_N = "top_n"; + + public static GoogleVertexAiRerankTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + Integer topN = extractOptionalPositiveInteger(map, TOP_N, ModelConfigurations.TASK_SETTINGS, validationException); + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new GoogleVertexAiRerankTaskSettings(topN); + } + + public static GoogleVertexAiRerankTaskSettings of( + GoogleVertexAiRerankTaskSettings originalSettings, + GoogleVertexAiRerankRequestTaskSettings requestSettings + ) { + var topN = requestSettings.topN() == null ? originalSettings.topN() : requestSettings.topN(); + return new GoogleVertexAiRerankTaskSettings(topN); + } + + private final Integer topN; + + public GoogleVertexAiRerankTaskSettings(@Nullable Integer topN) { + this.topN = topN; + } + + public GoogleVertexAiRerankTaskSettings(StreamInput in) throws IOException { + this.topN = in.readOptionalVInt(); + } + + public Integer topN() { + return topN; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalVInt(topN); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (topN != null) { + builder.field(TOP_N, topN); + } + + builder.endObject(); + + return builder; + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + GoogleVertexAiRerankTaskSettings that = (GoogleVertexAiRerankTaskSettings) object; + return Objects.equals(topN, that.topN); + } + + @Override + public int hashCode() { + return Objects.hash(topN); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankActionTests.java new file mode 100644 index 0000000000000..b84a6328e9882 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/googlevertexai/GoogleVertexAiRerankActionTests.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.googlevertexai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModelTests; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class GoogleVertexAiRerankActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + // Successful case tested via end-to-end notebook tests in AppEx repo + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction(getUrl(webServer), "projectId", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + 
}).when(sender).send(any(), any(), any(), any()); + + var action = createAction(getUrl(webServer), "projectId", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Google Vertex AI rerank request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction(getUrl(webServer), "projectId", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Google Vertex AI rerank request to [%s]", getUrl(webServer)))); + } + + private GoogleVertexAiRerankAction createAction(String url, String projectId, Sender sender) { + var model = GoogleVertexAiRerankModelTests.createModel(url, projectId, null); + + return new GoogleVertexAiRerankAction(sender, model, threadPool); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntityTests.java new file mode 100644 index 0000000000000..fd18d2573efcc --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestEntityTests.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.googlevertexai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.MatchersUtils.equalToIgnoringWhitespaceInJsonString; +import static org.hamcrest.MatcherAssert.assertThat; + +public class GoogleVertexAiRerankRequestEntityTests extends ESTestCase { + public void testXContent_SingleRequest_WritesModelAndTopNIfDefined() throws IOException { + var entity = new GoogleVertexAiRerankRequestEntity("query", List.of("abc"), "model", 8); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "model": "model", + "query": "query", + "records": [ + { + "id": "0", + "content": "abc" + } + ], + "topN": 8 + } + """)); + } + + public void testXContent_SingleRequest_DoesNotWriteModelAndTopNIfNull() throws IOException { + var entity = new GoogleVertexAiRerankRequestEntity("query", List.of("abc"), null, null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "query": "query", + "records": [ + { + "id": "0", + "content": "abc" + } + ] + } + """)); + } + + public void testXContent_MultipleRequests_WritesModelAndTopNIfDefined() throws IOException { + var entity = new GoogleVertexAiRerankRequestEntity("query", List.of("abc", "def"), "model", 8); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "model": "model", + "query": "query", + "records": [ + { + "id": "0", + "content": "abc" + }, + { + "id": "1", + "content": "def" + } + ], + "topN": 8 + } + """)); + } + + public void testXContent_MultipleRequests_DoesNotWriteModelAndTopNIfNull() throws IOException { + var entity = new GoogleVertexAiRerankRequestEntity("query", List.of("abc", "def"), null, null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "query": "query", + "records": [ + { + "id": "0", + "content": "abc" + }, + { + "id": "1", + "content": "def" + } + ] + } + """)); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestTests.java new file mode 100644 index 0000000000000..811adb6612a4e --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/googlevertexai/GoogleVertexAiRerankRequestTests.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.googlevertexai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModelTests; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class GoogleVertexAiRerankRequestTests extends ESTestCase { + + private static final String AUTH_HEADER_VALUE = "foo"; + + public void testCreateRequest_WithoutModelSet_And_WithoutTopNSet() throws IOException { + var input = "input"; + var query = "query"; + + var request = createRequest(query, input, null, null); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is(AUTH_HEADER_VALUE)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + + assertThat(requestMap, aMapWithSize(2)); + assertThat(requestMap.get("records"), is(List.of(Map.of("id", "0", "content", input)))); + assertThat(requestMap.get("query"), is(query)); + } + + public void testCreateRequest_WithTopNSet() throws IOException { + var input = "input"; + var query = "query"; + var topN = 1; + + var request = createRequest(query, input, null, topN); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is(AUTH_HEADER_VALUE)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + + assertThat(requestMap, aMapWithSize(3)); + assertThat(requestMap.get("records"), is(List.of(Map.of("id", "0", "content", input)))); + assertThat(requestMap.get("query"), is(query)); + assertThat(requestMap.get("topN"), is(topN)); + } + + public void testCreateRequest_WithModelSet() throws IOException { + var input = "input"; + var query = "query"; + var modelId = "model"; + + var request = createRequest(query, input, modelId, null); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is(AUTH_HEADER_VALUE)); + + var requestMap = 
entityAsMap(httpPost.getEntity().getContent()); + + assertThat(requestMap, aMapWithSize(3)); + assertThat(requestMap.get("records"), is(List.of(Map.of("id", "0", "content", input)))); + assertThat(requestMap.get("query"), is(query)); + assertThat(requestMap.get("model"), is(modelId)); + } + + public void testTruncate_DoesNotTruncate() { + var request = createRequest("query", "input", null, null); + var truncatedRequest = request.truncate(); + + assertThat(truncatedRequest, sameInstance(request)); + } + + private static GoogleVertexAiRerankRequest createRequest(String query, String input, @Nullable String modelId, @Nullable Integer topN) { + var rerankModel = GoogleVertexAiRerankModelTests.createModel(modelId, topN); + + return new GoogleVertexAiRerankWithoutAuthRequest(query, List.of(input), rerankModel); + } + + /** + * We use this class to fake the auth implementation to avoid static mocking of {@link GoogleVertexAiRequest} + */ + private static class GoogleVertexAiRerankWithoutAuthRequest extends GoogleVertexAiRerankRequest { + GoogleVertexAiRerankWithoutAuthRequest(String query, List input, GoogleVertexAiRerankModel model) { + super(query, input, model); + } + + @Override + public void decorateWithAuth(HttpPost httpPost) { + httpPost.setHeader(HttpHeaders.AUTHORIZATION, AUTH_HEADER_VALUE); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index e1d786819a536..4ae860f394022 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -126,6 +126,63 @@ public void testPositionParserAtTokenAfterField_ConsumesUntilEnd() throws IOExce } } + public void testPositionParserAtTokenAfterFieldCurrentObj() throws IOException { + var json = """ + { + "key": "value" + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + parser.nextToken(); + XContentUtils.positionParserAtTokenAfterFieldCurrentFlatObj(parser, "key", "some error"); + + assertEquals("value", parser.text()); + } + } + + public void testPositionParserAtTokenAfterFieldCurrentObj_ThrowsIfFieldIsMissing() throws IOException { + var json = """ + { + "key": "value" + } + """; + var errorFormat = "Error: %s"; + var missingField = "missing field"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + parser.nextToken(); + var exception = expectThrows( + IllegalStateException.class, + () -> XContentUtils.positionParserAtTokenAfterFieldCurrentFlatObj(parser, missingField, errorFormat) + ); + + assertEquals(String.format(Locale.ROOT, errorFormat, missingField), exception.getMessage()); + } + } + + public void testPositionParserAtTokenAfterFieldCurrentObj_DoesNotFindNested() throws IOException { + var json = """ + { + "nested": { + "key": "value" + } + } + """; + var errorFormat = "Error: %s"; + var missingField = "missing field"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + parser.nextToken(); + var exception = expectThrows( + IllegalStateException.class, + () -> XContentUtils.positionParserAtTokenAfterFieldCurrentFlatObj(parser, missingField, errorFormat) + ); + + assertEquals(String.format(Locale.ROOT, errorFormat, missingField), 
exception.getMessage()); + } + } + public void testConsumeUntilObjectEnd() throws IOException { var json = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java new file mode 100644 index 0000000000000..32450e3facfd0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.googlevertexai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class GoogleVertexAiRerankResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "records": [ + { + "id": "2", + "title": "title 2", + "content": "content 2", + "score": 0.97 + } + ] + } + """; + + RankedDocsResults parsedResults = GoogleVertexAiRerankResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.getRankedDocs(), is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2")))); + } + + public void testFromResponse_CreatesResultsForMultipleItems() throws IOException { + String responseJson = """ + { + "records": [ + { + "id": "2", + "title": "title 2", + "content": "content 2", + "score": 0.97 + }, + { + "id": "1", + "title": "title 1", + "content": "content 1", + "score": 0.90 + } + ] + } + """; + + RankedDocsResults parsedResults = GoogleVertexAiRerankResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.getRankedDocs(), + is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2"), new RankedDocsResults.RankedDoc(1, 0.90F, "content 1"))) + ); + } + + public void testFromResponse_FailsWhenRecordsFieldIsNotPresent() { + String responseJson = """ + { + "not_records": [ + { + "id": "2", + "title": "title 2", + "content": "content 2", + "score": 0.97 + }, + { + "id": "1", + "title": "title 1", + "content": "content 1", + "score": 0.90 + } + ] + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> GoogleVertexAiRerankResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [records] in Google Vertex AI rerank response")); + } + + public void testFromResponse_FailsWhenContentFieldIsNotPresent() { + String responseJson = """ + { + "records": [ + { + "id": "2", + "title": "title 2", + "content": 
"content 2", + "score": 0.97 + }, + { + "id": "1", + "title": "title 1", + "not_content": "content 1", + "score": 0.97 + } + ] + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> GoogleVertexAiRerankResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [content] in Google Vertex AI rerank response")); + } + + public void testFromResponse_FailsWhenScoreFieldIsNotPresent() { + String responseJson = """ + { + "records": [ + { + "id": "2", + "title": "title 2", + "content": "content 2", + "not_score": 0.97 + }, + { + "id": "1", + "title": "title 1", + "content": "content 1", + "score": 0.96 + } + ] + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> GoogleVertexAiRerankResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [score] in Google Vertex AI rerank response")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java index a8e1dd3997ca0..614ce71c36078 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java @@ -24,6 +24,8 @@ import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.googlevertexai.embeddings.GoogleVertexAiEmbeddingsTaskSettings; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankModel; +import org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankTaskSettings; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.After; @@ -107,6 +109,38 @@ public void testParseRequestConfig_CreatesGoogleVertexAiEmbeddingsModel() throws } } + public void testParseRequestConfig_CreatesGoogleVertexAiRerankModel() throws IOException { + var projectId = "project"; + var serviceAccountJson = """ + { + "some json" + } + """; + + try (var service = createGoogleVertexAiService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(GoogleVertexAiRerankModel.class)); + + var rerankModel = (GoogleVertexAiRerankModel) model; + + assertThat(rerankModel.getServiceSettings().projectId(), is(projectId)); + assertThat(rerankModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson)); + }, e -> fail("Model parsing should succeeded, but failed: " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.RERANK, + getRequestConfigMap( + new HashMap<>(Map.of(GoogleVertexAiServiceFields.PROJECT_ID, projectId)), + new HashMap<>(Map.of()), + getSecretSettingsMap(serviceAccountJson) + ), + Set.of(), + modelListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try 
(var service = createGoogleVertexAiService()) { var failureListener = getModelListenerForException( @@ -291,6 +325,33 @@ public void testParsePersistedConfigWithSecrets_CreatesGoogleVertexAiEmbeddingsM } } + public void testParsePersistedConfigWithSecrets_CreatesGoogleVertexAiRerankModel() throws IOException { + var projectId = "project"; + var topN = 1; + var serviceAccountJson = """ + { + "some json" + } + """; + + try (var service = createGoogleVertexAiService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>(Map.of(GoogleVertexAiServiceFields.PROJECT_ID, projectId)), + getTaskSettingsMap(topN), + getSecretSettingsMap(serviceAccountJson) + ); + + var model = service.parsePersistedConfigWithSecrets("id", TaskType.RERANK, persistedConfig.config(), persistedConfig.secrets()); + + assertThat(model, instanceOf(GoogleVertexAiRerankModel.class)); + + var rerankModel = (GoogleVertexAiRerankModel) model; + assertThat(rerankModel.getServiceSettings().projectId(), is(projectId)); + assertThat(rerankModel.getTaskSettings(), is(new GoogleVertexAiRerankTaskSettings(topN))); + assertThat(rerankModel.getSecretSettings().serviceAccountJson().toString(), is(serviceAccountJson)); + } + } + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { var projectId = "project"; var location = "location"; @@ -553,4 +614,12 @@ private static Map getTaskSettingsMap(Boolean autoTruncate) { return taskSettings; } + + private static Map getTaskSettingsMap(Integer topN) { + var taskSettings = new HashMap(); + + taskSettings.put(GoogleVertexAiRerankTaskSettings.TOP_N, topN); + + return taskSettings; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModelTests.java new file mode 100644 index 0000000000000..dff4e223cf9f4 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankModelTests.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.services.googlevertexai.rerank;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.inference.TaskType;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiSecretSettings;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import static org.hamcrest.Matchers.is;
+
+public class GoogleVertexAiRerankModelTests extends ESTestCase {
+
+    public void testBuildUri() throws URISyntaxException {
+        var projectId = "project";
+
+        URI uri = GoogleVertexAiRerankModel.buildUri(projectId);
+
+        assertThat(
+            uri,
+            is(
+                new URI(
+                    Strings.format(
+                        "https://discoveryengine.googleapis.com/v1/projects/%s/locations/global/rankingConfigs/default_ranking_config:rank",
+                        projectId
+                    )
+                )
+            )
+        );
+    }
+
+    public static GoogleVertexAiRerankModel createModel(@Nullable String modelId, @Nullable Integer topN) {
+        return new GoogleVertexAiRerankModel(
+            "id",
+            TaskType.RERANK,
+            "service",
+            new GoogleVertexAiRerankServiceSettings(randomAlphaOfLength(10), modelId, null),
+            new GoogleVertexAiRerankTaskSettings(topN),
+            new GoogleVertexAiSecretSettings(randomSecureStringOfLength(8))
+        );
+    }
+
+    public static GoogleVertexAiRerankModel createModel(String url, @Nullable String modelId, @Nullable Integer topN) {
+        return new GoogleVertexAiRerankModel(
+            "id",
+            TaskType.RERANK,
+            "service",
+            url,
+            new GoogleVertexAiRerankServiceSettings(randomAlphaOfLength(10), modelId, null),
+            new GoogleVertexAiRerankTaskSettings(topN),
+            new GoogleVertexAiSecretSettings(randomSecureStringOfLength(8))
+        );
+    }
+
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettingsTests.java
new file mode 100644
index 0000000000000..abe41793d6e70
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankRequestTaskSettingsTests.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.googlevertexai.rerank;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.is;
+
+public class GoogleVertexAiRerankRequestTaskSettingsTests extends ESTestCase {
+
+    public void testFromMap_ReturnsEmptySettings_IfMapEmpty() {
+        var requestTaskSettings = GoogleVertexAiRerankRequestTaskSettings.fromMap(new HashMap<>());
+        assertThat(requestTaskSettings, is(GoogleVertexAiRerankRequestTaskSettings.EMPTY_SETTINGS));
+    }
+
+    public void testFromMap_DoesNotThrowValidationException_IfTopNIsMissing() {
+        var requestTaskSettings = GoogleVertexAiRerankRequestTaskSettings.fromMap(new HashMap<>(Map.of("unrelated", 1)));
+        assertThat(requestTaskSettings, is(new GoogleVertexAiRerankRequestTaskSettings(null)));
+    }
+
+    public void testFromMap_ExtractsTopN() {
+        var topN = 1;
+        var requestTaskSettings = GoogleVertexAiRerankRequestTaskSettings.fromMap(
+            new HashMap<>(Map.of(GoogleVertexAiRerankTaskSettings.TOP_N, topN))
+        );
+        assertThat(requestTaskSettings, is(new GoogleVertexAiRerankRequestTaskSettings(topN)));
+    }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettingsTests.java
new file mode 100644
index 0000000000000..9a8eb897f021e
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankServiceSettingsTests.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.services.googlevertexai.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.googlevertexai.GoogleVertexAiServiceFields; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; + +import java.io.IOException; +import java.util.HashMap; + +import static org.elasticsearch.xpack.inference.MatchersUtils.equalToIgnoringWhitespaceInJsonString; +import static org.hamcrest.Matchers.is; + +public class GoogleVertexAiRerankServiceSettingsTests extends AbstractBWCWireSerializationTestCase { + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var projectId = randomAlphaOfLength(10); + var modelId = randomFrom(new String[] { null, randomAlphaOfLength(10) }); + + var serviceSettings = GoogleVertexAiRerankServiceSettings.fromMap(new HashMap<>() { + { + put(GoogleVertexAiServiceFields.PROJECT_ID, projectId); + put(ServiceFields.MODEL_ID, modelId); + } + }, ConfigurationParseContext.REQUEST); + + assertThat(serviceSettings, is(new GoogleVertexAiRerankServiceSettings(projectId, modelId, null))); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new GoogleVertexAiRerankServiceSettings("projectId", "modelId", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "project_id": "projectId", + "model_id": "modelId", + "rate_limit": { + "requests_per_minute": 300 + } + } + """)); + } + + public void testToXContent_DoesNotWriteModelIfNull() throws IOException { + var entity = new GoogleVertexAiRerankServiceSettings("projectId", null, null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "project_id": "projectId", + "rate_limit": { + "requests_per_minute": 300 + } + } + """)); + } + + public void testFilteredXContentObject_WritesAllValues() throws IOException { + var entity = new GoogleVertexAiRerankServiceSettings("projectId", "modelId", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = entity.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "project_id": "projectId", + "model_id": "modelId", + "rate_limit": { + "requests_per_minute": 300 + } + } + """)); + } + + @Override + protected Writeable.Reader instanceReader() { + return GoogleVertexAiRerankServiceSettings::new; + } + + @Override + protected GoogleVertexAiRerankServiceSettings createTestInstance() { + return createRandom(); 
+ } + + @Override + protected GoogleVertexAiRerankServiceSettings mutateInstance(GoogleVertexAiRerankServiceSettings instance) throws IOException { + return randomValueOtherThan(instance, GoogleVertexAiRerankServiceSettingsTests::createRandom); + } + + @Override + protected GoogleVertexAiRerankServiceSettings mutateInstanceForVersion( + GoogleVertexAiRerankServiceSettings instance, + TransportVersion version + ) { + return instance; + } + + private static GoogleVertexAiRerankServiceSettings createRandom() { + return new GoogleVertexAiRerankServiceSettings( + randomAlphaOfLength(10), + randomFrom(new String[] { null, randomAlphaOfLength(10) }), + randomFrom(new RateLimitSettings[] { null, RateLimitSettingsTests.createRandom() }) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java new file mode 100644 index 0000000000000..5bfcbb9a3f636 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/rerank/GoogleVertexAiRerankTaskSettingsTests.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.googlevertexai.rerank; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.MatchersUtils.equalToIgnoringWhitespaceInJsonString; +import static org.elasticsearch.xpack.inference.services.googlevertexai.rerank.GoogleVertexAiRerankTaskSettings.TOP_N; +import static org.hamcrest.Matchers.is; + +public class GoogleVertexAiRerankTaskSettingsTests extends AbstractBWCWireSerializationTestCase { + + public void testFromMap_TopNIsSet() { + var topN = 1; + var taskSettingsMap = getTaskSettingsMap(topN); + var taskSettings = GoogleVertexAiRerankTaskSettings.fromMap(taskSettingsMap); + + assertThat(taskSettings, is(new GoogleVertexAiRerankTaskSettings(topN))); + } + + public void testFromMap_ThrowsValidationException_IfTopNIsInvalidValue() { + var taskSettingsMap = getTaskSettingsMap("invalid"); + + expectThrows(ValidationException.class, () -> GoogleVertexAiRerankTaskSettings.fromMap(taskSettingsMap)); + } + + public void testFromMap_TopNIsNull() { + var taskSettingsMap = getTaskSettingsMap(null); + var taskSettings = GoogleVertexAiRerankTaskSettings.fromMap(taskSettingsMap); + // needed, because of constructors being ambiguous otherwise + Integer nullInt = null; + + assertThat(taskSettings, is(new GoogleVertexAiRerankTaskSettings(nullInt))); + } + + public void testFromMap_DoesNotThrow_WithEmptyMap() { + assertNull(GoogleVertexAiRerankTaskSettings.fromMap(new HashMap<>()).topN()); + } 
+ + public void testOf_UseRequestSettings() { + var originalTopN = 1; + var originalSettings = new GoogleVertexAiRerankTaskSettings(originalTopN); + + var requestTopN = originalTopN + 1; + var requestTaskSettings = new GoogleVertexAiRerankRequestTaskSettings(requestTopN); + + assertThat(GoogleVertexAiRerankTaskSettings.of(originalSettings, requestTaskSettings).topN(), is(requestTopN)); + } + + public void testOf_UseOriginalSettings() { + var originalTopN = 1; + var originalSettings = new GoogleVertexAiRerankTaskSettings(originalTopN); + + var requestTaskSettings = new GoogleVertexAiRerankRequestTaskSettings(null); + + assertThat(GoogleVertexAiRerankTaskSettings.of(originalSettings, requestTaskSettings).topN(), is(originalTopN)); + } + + public void testToXContent_WritesTopNIfNotNull() throws IOException { + var settings = GoogleVertexAiRerankTaskSettings.fromMap(getTaskSettingsMap(1)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + settings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, equalToIgnoringWhitespaceInJsonString(""" + { + "top_n":1 + } + """)); + } + + public void testToXContent_DoesNotWriteTopNIfNull() throws IOException { + var settings = GoogleVertexAiRerankTaskSettings.fromMap(getTaskSettingsMap(null)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + settings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return GoogleVertexAiRerankTaskSettings::new; + } + + @Override + protected GoogleVertexAiRerankTaskSettings createTestInstance() { + return createRandom(); + } + + @Override + protected GoogleVertexAiRerankTaskSettings mutateInstance(GoogleVertexAiRerankTaskSettings instance) throws IOException { + return randomValueOtherThan(instance, GoogleVertexAiRerankTaskSettingsTests::createRandom); + } + + @Override + protected GoogleVertexAiRerankTaskSettings mutateInstanceForVersion( + GoogleVertexAiRerankTaskSettings instance, + TransportVersion version + ) { + return instance; + } + + private static GoogleVertexAiRerankTaskSettings createRandom() { + return new GoogleVertexAiRerankTaskSettings(randomFrom(new Integer[] { null, randomNonNegativeInt() })); + } + + private static Map getTaskSettingsMap(@Nullable Object topN) { + var map = new HashMap(); + + if (topN != null) { + map.put(TOP_N, topN); + } + + return map; + } +} From 5f308b791a0207aa125a2e8812a3978a64b0b202 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 1 Jul 2024 15:53:33 +0200 Subject: [PATCH 087/216] Avoid semantic_text to be defined as part of a nested field until we make it work (#110055) --- .../org/elasticsearch/index/mapper/MapperBuilderContext.java | 4 ++++ .../org/elasticsearch/index/mapper/NestedObjectMapper.java | 5 +++++ .../xpack/inference/mapper/SemanticTextFieldMapper.java | 3 +++ .../test/inference/10_semantic_text_field_mapping.yml | 5 +++-- 4 files changed, 15 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java index fa501a31045e7..5a35cfb11bbe0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java
@@ -134,4 +134,8 @@ public ObjectMapper.Dynamic getDynamic() {
     public MergeReason getMergeReason() {
         return mergeReason;
     }
+
+    public boolean isNested() {
+        return false;
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java
index 308634e0c4cd7..766fb2a8e574f 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java
@@ -182,6 +182,11 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) {
                 getMergeReason()
             );
         }
+
+        @Override
+        public boolean isNested() {
+            return true;
+        }
     }
 
     private final Explicit<Boolean> includeInRoot;
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java
index 3a62428f237bc..81e3dc9d6adc7 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java
@@ -153,6 +153,9 @@ public SemanticTextFieldMapper build(MapperBuilderContext context) {
             throw new IllegalArgumentException(CONTENT_TYPE + " field [" + leafName() + "] does not support multi-fields");
         }
         final String fullName = context.buildFullName(leafName());
+        if (context.isNested()) {
+            throw new IllegalArgumentException(CONTENT_TYPE + " field [" + fullName + "] cannot be nested");
+        }
         var childContext = context.createChildContext(leafName(), ObjectMapper.Dynamic.FALSE);
         final ObjectMapper inferenceField = inferenceFieldBuilder.apply(childContext);
         return new SemanticTextFieldMapper(
diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml
index d60667448d6a0..d177ce08b3847 100644
--- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml
+++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml
@@ -157,11 +157,12 @@ setup:
             type: keyword
 
 ---
-"Can be used as a nested field":
+"Cannot be used as a nested field":
 
   - do:
+      catch: /semantic_text field \[nested.semantic\] cannot be nested/
       indices.create:
-        index: test-copy_to-index
+        index: test-nested-index
        body:
          mappings:
            properties:

From cfa1a8a0b6432dc59de9ee3e01ea240cda6d9ed3 Mon Sep 17 00:00:00 2001
From: Alexander Spies
Date: Mon, 1 Jul 2024 16:14:22 +0200
Subject: [PATCH 088/216] ESQL: Remove processors from esql core (#110333)

These were leftovers from when we relied on the ql project. It turns out
we never instantiated the processors, so we can get rid of their
serialization code - and inlining their static apply methods (normally
used to implement the process interface method) lets us get rid of them
entirely. Some inner enums/classes needed to be extracted, and some
tests made sense to adapt rather than get rid of.
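As a sketch of the pattern (illustrative only, not the verbatim extracted class; the enum shape and the `BiFunction` interface here are assumptions), the logic that previously lived behind a processor's static apply method ends up as a plain standalone enum with no NamedWriteable/serialization machinery:

```
import java.util.function.BiFunction;

// Hypothetical standalone replacement for a removed processor: each constant
// carries its own apply logic, and there is no serialization code to register.
public enum BinaryLogicOperation implements BiFunction<Boolean, Boolean, Boolean> {
    AND("&&") {
        @Override
        public Boolean apply(Boolean left, Boolean right) {
            // Three-valued logic: a definite FALSE dominates an unknown (null) side.
            if (Boolean.FALSE.equals(left) || Boolean.FALSE.equals(right)) {
                return Boolean.FALSE;
            }
            return (left == null || right == null) ? null : Boolean.TRUE;
        }
    },
    OR("||") {
        @Override
        public Boolean apply(Boolean left, Boolean right) {
            // Three-valued logic: a definite TRUE dominates an unknown (null) side.
            if (Boolean.TRUE.equals(left) || Boolean.TRUE.equals(right)) {
                return Boolean.TRUE;
            }
            return (left == null || right == null) ? null : Boolean.FALSE;
        }
    };

    private final String symbol;

    BinaryLogicOperation(String symbol) {
        this.symbol = symbol;
    }

    public String symbol() {
        return symbol;
    }
}
```

Call sites then invoke something like `BinaryLogicOperation.AND.apply(left, right)` directly instead of routing through a processor instance, which is what allows the processors' Writeable registration and stream serialization code to disappear.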
--- .../function/scalar/UnaryScalarFunction.java | 7 +- .../gen/processor/BinaryProcessor.java | 70 ----------- .../gen/processor/ChainingProcessor.java | 71 ----------- .../gen/processor/ConstantNamedWriteable.java | 17 --- .../gen/processor/ConstantProcessor.java | 111 ------------------ .../processor/FunctionalBinaryProcessor.java | 63 ---------- .../FunctionalEnumBinaryProcessor.java | 37 ------ .../expression/gen/processor/Processor.java | 20 ---- .../gen/processor/UnaryProcessor.java | 69 ----------- .../expression/predicate/logical/And.java | 1 - .../predicate/logical/BinaryLogic.java | 1 - .../logical/BinaryLogicOperation.java | 62 ++++++++++ .../logical/BinaryLogicProcessor.java | 102 ---------------- .../expression/predicate/logical/Not.java | 17 ++- .../predicate/logical/NotProcessor.java | 64 ---------- .../core/expression/predicate/logical/Or.java | 1 - .../predicate/nulls/CheckNullProcessor.java | 90 -------------- .../expression/predicate/nulls/IsNotNull.java | 7 -- .../expression/predicate/nulls/IsNull.java | 7 -- .../arithmetic/BinaryArithmeticProcessor.java | 48 -------- .../predicate/operator/arithmetic/Neg.java | 7 -- .../arithmetic/UnaryArithmeticProcessor.java | 76 ------------ .../operator/comparison/BinaryComparison.java | 1 - .../comparison/BinaryComparisonOperation.java | 54 +++++++++ .../comparison/BinaryComparisonProcessor.java | 84 ------------- .../predicate/operator/comparison/Equals.java | 1 - .../operator/comparison/GreaterThan.java | 1 - .../comparison/GreaterThanOrEqual.java | 1 - .../predicate/operator/comparison/In.java | 15 ++- .../operator/comparison/InProcessor.java | 85 -------------- .../operator/comparison/LessThan.java | 1 - .../operator/comparison/LessThanOrEqual.java | 1 - .../operator/comparison/NotEquals.java | 1 - .../operator/comparison/NullEquals.java | 1 - .../predicate/regex/RegexMatch.java | 8 +- .../predicate/regex/RegexOperation.java | 45 +++++++ .../predicate/regex/RegexProcessor.java | 99 ---------------- .../core/expression/processor/Processors.java | 63 ---------- .../gen/processor/ChainingProcessorTests.java | 57 --------- .../gen/processor/ConstantProcessorTests.java | 69 ----------- .../logical/BinaryLogicOperationTests.java | 42 +++++++ .../logical/BinaryLogicProcessorTests.java | 79 ------------- .../nulls/CheckNullProcessorTests.java | 57 --------- .../esql/core/tree/NodeSubclassTests.java | 10 -- .../comparison/EsqlBinaryComparison.java | 37 ++++-- .../predicate/operator/comparison/In.java | 17 ++- .../comparison/EsqlBinaryComparisonTests.java | 33 ++++-- 47 files changed, 299 insertions(+), 1511 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/BinaryProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantNamedWriteable.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalBinaryProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalEnumBinaryProcessor.java delete mode 100644 
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/Processor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/UnaryProcessor.java create mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperation.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/NotProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/UnaryArithmeticProcessor.java create mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonOperation.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InProcessor.java create mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexOperation.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexProcessor.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/processor/Processors.java delete mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java delete mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessorTests.java create mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperationTests.java delete mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessorTests.java delete mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessorTests.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java index 1efda1e54185b..e821ed33ffc0d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; -import 
org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; @@ -49,15 +48,11 @@ public Expression field() { return field; } - protected abstract Processor makeProcessor(); - @Override public boolean foldable() { return field.foldable(); } @Override - public Object fold() { - return makeProcessor().process(field().fold()); - } + public abstract Object fold(); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/BinaryProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/BinaryProcessor.java deleted file mode 100644 index 13c4498e54986..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/BinaryProcessor.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; - -public abstract class BinaryProcessor implements Processor { - - private final Processor left, right; - - public BinaryProcessor(Processor left, Processor right) { - this.left = left; - this.right = right; - } - - protected BinaryProcessor(StreamInput in) throws IOException { - left = in.readNamedWriteable(Processor.class); - right = in.readNamedWriteable(Processor.class); - } - - @Override - public final void writeTo(StreamOutput out) throws IOException { - out.writeNamedWriteable(left); - out.writeNamedWriteable(right); - doWrite(out); - } - - protected abstract void doWrite(StreamOutput out) throws IOException; - - @Override - public Object process(Object input) { - Object l = left.process(input); - if (l == null) { - return null; - } - checkParameter(l); - - Object r = right.process(input); - if (r == null) { - return null; - } - checkParameter(r); - - return doProcess(l, r); - } - - /** - * Checks the parameter (typically for its type) if the value is not null. - */ - protected void checkParameter(Object param) { - // no-op - } - - protected Processor left() { - return left; - } - - protected Processor right() { - return right; - } - - protected abstract Object doProcess(Object left, Object right); -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessor.java deleted file mode 100644 index 60e60bc264369..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessor.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; -import java.util.Objects; - -/** - * A {@linkplain Processor} that composes the results of two - * {@linkplain Processor}s. - */ -public class ChainingProcessor extends UnaryProcessor { - public static final String NAME = "."; - - private final Processor processor; - - public ChainingProcessor(Processor first, Processor second) { - super(first); - this.processor = second; - } - - public ChainingProcessor(StreamInput in) throws IOException { - super(in); - processor = in.readNamedWriteable(Processor.class); - } - - @Override - protected void doWrite(StreamOutput out) throws IOException { - out.writeNamedWriteable(processor); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - protected Object doProcess(Object input) { - return processor.process(input); - } - - Processor first() { - return child(); - } - - Processor second() { - return processor; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), processor); - } - - @Override - public boolean equals(Object obj) { - return super.equals(obj) && Objects.equals(processor, ((ChainingProcessor) obj).processor); - } - - @Override - public String toString() { - return processor + "(" + super.toString() + ")"; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantNamedWriteable.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantNamedWriteable.java deleted file mode 100644 index 97733ed4d705f..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantNamedWriteable.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.NamedWriteable; - -/** - * Marker interface used by QL for pluggable constant serialization. - */ -public interface ConstantNamedWriteable extends NamedWriteable { - -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessor.java deleted file mode 100644 index ad426b641ed06..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessor.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.versionfield.Version; - -import java.io.IOException; -import java.time.Instant; -import java.time.ZoneId; -import java.time.ZonedDateTime; -import java.util.Objects; - -public class ConstantProcessor implements Processor { - - public static String NAME = "c"; - - private Object constant; - private final Type type; - - enum Type { - NAMED_WRITABLE, - ZONEDDATETIME, - GENERIC, - VERSION // Version is in x-pack, so StreamInput/Output cannot manage it as a generic type - } - - public ConstantProcessor(Object value) { - this.constant = value; - if (value instanceof NamedWriteable) { - type = Type.NAMED_WRITABLE; - } else if (value instanceof ZonedDateTime) { - type = Type.ZONEDDATETIME; - } else if (value instanceof Version) { - type = Type.VERSION; - } else { - type = Type.GENERIC; - } - } - - public ConstantProcessor(StreamInput in) throws IOException { - type = in.readEnum(Type.class); - switch (type) { - case NAMED_WRITABLE -> constant = in.readNamedWriteable(ConstantNamedWriteable.class); - case ZONEDDATETIME -> { - ZonedDateTime zdt; - ZoneId zoneId = in.readZoneId(); - zdt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(in.readLong()), zoneId); - constant = zdt.withNano(in.readInt()); - } - case VERSION -> constant = new Version(in.readString()); - case GENERIC -> constant = in.readGenericValue(); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeEnum(type); - switch (type) { - case NAMED_WRITABLE -> out.writeNamedWriteable((NamedWriteable) constant); - case ZONEDDATETIME -> { - ZonedDateTime zdt = (ZonedDateTime) constant; - out.writeZoneId(zdt.getZone()); - out.writeLong(zdt.toInstant().toEpochMilli()); - out.writeInt(zdt.getNano()); - } - case VERSION -> out.writeString(constant.toString()); - case GENERIC -> out.writeGenericValue(constant); - } - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public Object process(Object input) { - return constant; - } - - @Override - public int hashCode() { - return Objects.hashCode(constant); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ConstantProcessor other = (ConstantProcessor) obj; - return Objects.equals(constant, other.constant); - } - - @Override - public String toString() { - return "^" + constant; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalBinaryProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalBinaryProcessor.java deleted file mode 100644 index 3713102b893f1..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalBinaryProcessor.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; -import java.util.Objects; -import java.util.function.BiFunction; - -/** - * Base class for definition binary processors based on functions (for applying). - */ -public abstract class FunctionalBinaryProcessor<T, U, R, F extends BiFunction<T, U, R>> extends BinaryProcessor { - - private final F function; - - protected FunctionalBinaryProcessor(Processor left, Processor right, F function) { - super(left, right); - this.function = function; - } - - protected FunctionalBinaryProcessor(StreamInput in, Reader<F> reader) throws IOException { - super(in); - this.function = reader.read(in); - } - - public F function() { - return function; - } - - @SuppressWarnings("unchecked") - @Override - protected Object doProcess(Object left, Object right) { - return function.apply((T) left, (U) right); - } - - @Override - public int hashCode() { - return Objects.hash(left(), right(), function()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - FunctionalBinaryProcessor<?, ?, ?, ?> other = (FunctionalBinaryProcessor<?, ?, ?, ?>) obj; - return Objects.equals(function(), other.function()) - && Objects.equals(left(), other.left()) - && Objects.equals(right(), other.right()); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalEnumBinaryProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalEnumBinaryProcessor.java deleted file mode 100644 index 352cea13535c1..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/FunctionalEnumBinaryProcessor.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; -import java.util.function.BiFunction; - -/** - * Base class for definition binary processors based on functions (for applying) defined as enums (for serialization purposes). - */ -public abstract class FunctionalEnumBinaryProcessor<T, U, R, F extends Enum<F> & BiFunction<T, U, R>> extends FunctionalBinaryProcessor< - T, - U, - R, - F> { - - protected FunctionalEnumBinaryProcessor(Processor left, Processor right, F function) { - super(left, right, function); - } - - protected FunctionalEnumBinaryProcessor(StreamInput in, Reader<F> reader) throws IOException { - super(in, reader); - } - - @Override - protected void doWrite(StreamOutput out) throws IOException { - out.writeEnum(function()); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/Processor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/Processor.java deleted file mode 100644 index bafdf3b05f40c..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/Processor.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V.
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.NamedWriteable; - -/** - * A {@code Processor} evaluates locally an expression. For instance, ABS(foo). - * Aggregate functions are handled by ES but scalars are not. - * - * This is an opaque class, the computed/compiled result gets saved on the client during scrolling. - */ -public interface Processor extends NamedWriteable { - - Object process(Object input); -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/UnaryProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/UnaryProcessor.java deleted file mode 100644 index 4ddf851ce3c27..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/UnaryProcessor.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; -import java.util.Objects; - -public abstract class UnaryProcessor implements Processor { - - private final Processor child; - - public UnaryProcessor(Processor child) { - this.child = child; - } - - protected UnaryProcessor(StreamInput in) throws IOException { - child = in.readNamedWriteable(Processor.class); - } - - @Override - public final void writeTo(StreamOutput out) throws IOException { - out.writeNamedWriteable(child); - doWrite(out); - } - - protected abstract void doWrite(StreamOutput out) throws IOException; - - @Override - public final Object process(Object input) { - return doProcess(child.process(input)); - } - - public Processor child() { - return child; - } - - protected abstract Object doProcess(Object input); - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - UnaryProcessor other = (UnaryProcessor) obj; - return Objects.equals(child, other.child); - } - - @Override - public int hashCode() { - return Objects.hashCode(child); - } - - @Override - public String toString() { - return Objects.toString(child); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/And.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/And.java index e5ab86605657d..d2b801a012d0c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/And.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/And.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import 
org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java index b52cd728dd773..f40db61774fc5 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperation.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperation.java new file mode 100644 index 0000000000000..8b8224334654a --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperation.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.core.expression.predicate.logical; + +import org.elasticsearch.xpack.esql.core.expression.predicate.PredicateBiFunction; + +import java.util.function.BiFunction; + +public enum BinaryLogicOperation implements PredicateBiFunction<Boolean, Boolean, Boolean> { + + AND((l, r) -> { + if (Boolean.FALSE.equals(l) || Boolean.FALSE.equals(r)) { + return Boolean.FALSE; + } + if (l == null || r == null) { + return null; + } + return Boolean.logicalAnd(l.booleanValue(), r.booleanValue()); + }, "AND"), + OR((l, r) -> { + if (Boolean.TRUE.equals(l) || Boolean.TRUE.equals(r)) { + return Boolean.TRUE; + } + if (l == null || r == null) { + return null; + } + return Boolean.logicalOr(l.booleanValue(), r.booleanValue()); + }, "OR"); + + private final BiFunction<Boolean, Boolean, Boolean> process; + private final String symbol; + + BinaryLogicOperation(BiFunction<Boolean, Boolean, Boolean> process, String symbol) { + this.process = process; + this.symbol = symbol; + } + + @Override + public String symbol() { + return symbol; + } + + @Override + public Boolean apply(Boolean left, Boolean right) { + return process.apply(left, right); + } + + @Override + public final Boolean doApply(Boolean left, Boolean right) { + return null; + } + + @Override + public String toString() { + return symbol; + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessor.java deleted file mode 100644 index 14d6b819e87fe..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessor.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.FunctionalEnumBinaryProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.expression.predicate.PredicateBiFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; - -import java.io.IOException; -import java.util.function.BiFunction; - -public class BinaryLogicProcessor extends FunctionalEnumBinaryProcessor<Boolean, Boolean, Boolean, BinaryLogicOperation> { - - public enum BinaryLogicOperation implements PredicateBiFunction<Boolean, Boolean, Boolean> { - - AND((l, r) -> { - if (Boolean.FALSE.equals(l) || Boolean.FALSE.equals(r)) { - return Boolean.FALSE; - } - if (l == null || r == null) { - return null; - } - return Boolean.logicalAnd(l.booleanValue(), r.booleanValue()); - }, "AND"), - OR((l, r) -> { - if (Boolean.TRUE.equals(l) || Boolean.TRUE.equals(r)) { - return Boolean.TRUE; - } - if (l == null || r == null) { - return null; - } - return Boolean.logicalOr(l.booleanValue(), r.booleanValue()); - }, "OR"); - - private final BiFunction<Boolean, Boolean, Boolean> process; - private final String symbol; - - BinaryLogicOperation(BiFunction<Boolean, Boolean, Boolean> process, String symbol) { - this.process = process; - this.symbol = symbol; - } - - @Override - public String symbol() { - return symbol; - } - - @Override - public Boolean apply(Boolean left, Boolean right) { - return process.apply(left, right); - } - - @Override - public final Boolean doApply(Boolean left, Boolean right) { - return null; - } - - @Override - public String toString() { - return symbol; - } - } - - public static final String NAME = "lb"; - - public BinaryLogicProcessor(Processor left, Processor right, BinaryLogicOperation operation) { - super(left, right, operation); - } - - public BinaryLogicProcessor(StreamInput in) throws IOException { - super(in, i -> i.readEnum(BinaryLogicOperation.class)); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - protected void checkParameter(Object param) { - if (param != null && (param instanceof Boolean) == false) { - throw new QlIllegalArgumentException("A boolean is required; received {}", param); - } - } - - @Override - public Object process(Object input) { - Object l = left().process(input); - checkParameter(l); - Object r = right().process(input); - checkParameter(r); - - return doProcess(l, r); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java index 5f183a1cc26ea..c4983b49a6bc8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java @@ -8,9 +8,9 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import 
org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -57,12 +57,19 @@ protected TypeResolution resolveType() { @Override public Object fold() { - return NotProcessor.INSTANCE.process(field().fold()); + return apply(field().fold()); } - @Override - protected Processor makeProcessor() { - return NotProcessor.INSTANCE; + private static Boolean apply(Object input) { + if (input == null) { + return null; + } + + if ((input instanceof Boolean) == false) { + throw new QlIllegalArgumentException("A boolean is required; received {}", input); + } + + return ((Boolean) input).booleanValue() ? Boolean.FALSE : Boolean.TRUE; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/NotProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/NotProcessor.java deleted file mode 100644 index 5f633c902dff0..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/NotProcessor.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; - -import java.io.IOException; - -public class NotProcessor implements Processor { - - public static final NotProcessor INSTANCE = new NotProcessor(); - - public static final String NAME = "ln"; - - private NotProcessor() {} - - public NotProcessor(StreamInput in) throws IOException {} - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException {} - - @Override - public Object process(Object input) { - return apply(input); - } - - public static Boolean apply(Object input) { - if (input == null) { - return null; - } - - if ((input instanceof Boolean) == false) { - throw new QlIllegalArgumentException("A boolean is required; received {}", input); - } - - return ((Boolean) input).booleanValue() ? 
Boolean.FALSE : Boolean.TRUE; - } - - @Override - public int hashCode() { - return NotProcessor.class.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - return obj == null || getClass() != obj.getClass(); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Or.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Or.java index b3afb662a009d..bf7a16aec8df9 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Or.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Or.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor.BinaryLogicOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessor.java deleted file mode 100644 index 10503fcd00178..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessor.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.nulls; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; - -import java.io.IOException; -import java.util.Objects; -import java.util.function.Predicate; - -public class CheckNullProcessor implements Processor { - - public enum CheckNullOperation implements Predicate<Object> { - - IS_NULL(Objects::isNull, "IS NULL"), - IS_NOT_NULL(Objects::nonNull, "IS NOT NULL"); - - private final Predicate<Object> process; - private final String symbol; - - CheckNullOperation(Predicate<Object> process, String symbol) { - this.process = process; - this.symbol = symbol; - } - - public String symbol() { - return symbol; - } - - @Override - public String toString() { - return symbol; - } - - @Override - public boolean test(Object o) { - return process.test(o); - } - } - - public static final String NAME = "nckn"; - - private final CheckNullOperation operation; - - CheckNullProcessor(CheckNullOperation operation) { - this.operation = operation; - } - - public CheckNullProcessor(StreamInput in) throws IOException { - this(in.readEnum(CheckNullOperation.class)); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeEnum(operation); - } - - @Override - public Object process(Object input) { - return operation.test(input); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - CheckNullProcessor that = (CheckNullProcessor) o; - return operation == that.operation; - } - - @Override - public int hashCode() { - return Objects.hash(operation); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java index e365480a6fd79..9879a1f5ffc29 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java @@ -11,9 +11,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.CheckNullProcessor.CheckNullOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -55,11 +53,6 @@ public Object fold() { return field().fold() != null && DataType.isNull(field().dataType()) == false; } - @Override - protected Processor makeProcessor() { - return new CheckNullProcessor(CheckNullOperation.IS_NOT_NULL); - } - @Override public Nullability nullable() { return Nullability.FALSE; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java index 8b6eb5d4404b0..d88945045b03e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java @@ -11,9 +11,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.CheckNullProcessor.CheckNullOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -51,11 +49,6 @@ public Object fold() { return field().fold() == null || DataType.isNull(field().dataType()); } - @Override - protected Processor makeProcessor() { - return new CheckNullProcessor(CheckNullOperation.IS_NULL); - } - @Override public Nullability nullable() { return Nullability.FALSE; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java deleted file mode 100644 index 73e3ed560d6fa..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticProcessor.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.FunctionalBinaryProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; - -import java.io.IOException; - -public final class BinaryArithmeticProcessor extends FunctionalBinaryProcessor<Object, Object, Object, BinaryArithmeticOperation> { - - public static final String NAME = "abn"; - - public BinaryArithmeticProcessor(Processor left, Processor right, BinaryArithmeticOperation operation) { - super(left, right, operation); - } - - public BinaryArithmeticProcessor(StreamInput in) throws IOException { - super(in, i -> i.readNamedWriteable(BinaryArithmeticOperation.class)); - } - - @Override - protected void doWrite(StreamOutput out) throws IOException { - out.writeNamedWriteable(function()); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - protected Object doProcess(Object left, Object right) { - BinaryArithmeticOperation f = function(); - - if (left == null || right == null) { - return null; - } - - return f.apply(left, right); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java index c7cb2bb3e3832..ddd4ce736879b 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java @@ -8,8 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -50,9 +48,4 @@ public Object fold() { public DataType dataType() { return field().dataType(); } - - @Override - protected Processor makeProcessor() { - return new UnaryArithmeticProcessor(UnaryArithmeticOperation.NEGATE); - } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/UnaryArithmeticProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/UnaryArithmeticProcessor.java deleted file mode 100644 index 835d1a7366486..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/UnaryArithmeticProcessor.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; - -import java.io.IOException; -import java.util.function.Function; - -public class UnaryArithmeticProcessor implements Processor { - - public enum UnaryArithmeticOperation { - - NEGATE(Arithmetics::negate); - - private final Function<Number, Number> process; - - UnaryArithmeticOperation(Function<Number, Number> process) { - this.process = process; - } - - public final Number apply(Number number) { - return process.apply(number); - } - - public String symbol() { - return "-"; - } - } - - public static final String NAME = "au"; - - private final UnaryArithmeticOperation operation; - - public UnaryArithmeticProcessor(UnaryArithmeticOperation operation) { - this.operation = operation; - } - - public UnaryArithmeticProcessor(StreamInput in) throws IOException { - operation = in.readEnum(UnaryArithmeticOperation.class); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeEnum(operation); - } - - @Override - public Object process(Object input) { - if (input == null) { - return null; - } - - if (input instanceof Number number) { - return operation.apply(number); - } - throw new QlIllegalArgumentException("A number is required; received {}", input); - } - - @Override - public String toString() { - return operation.symbol() + super.toString(); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java index 193b77f2344c0..3f0f817c3c3d4 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparison.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonOperation.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonOperation.java new file mode 100644 index 0000000000000..efe8a7a8cf615 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/BinaryComparisonOperation.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
 Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; + +import org.elasticsearch.xpack.esql.core.expression.predicate.PredicateBiFunction; - -import java.util.function.BiFunction; - hmm
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.FunctionalEnumBinaryProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.expression.predicate.PredicateBiFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; - -import java.io.IOException; -import java.util.function.BiFunction; - -public class BinaryComparisonProcessor extends FunctionalEnumBinaryProcessor<Object, Object, Boolean, BinaryComparisonOperation> { - - public enum BinaryComparisonOperation implements PredicateBiFunction<Object, Object, Boolean> { - - EQ(Comparisons::eq, "=="), - NULLEQ(Comparisons::nulleq, "<=>"), - NEQ(Comparisons::neq, "!="), - GT(Comparisons::gt, ">"), - GTE(Comparisons::gte, ">="), - LT(Comparisons::lt, "<"), - LTE(Comparisons::lte, "<="); - - private final BiFunction<Object, Object, Boolean> process; - private final String symbol; - - BinaryComparisonOperation(BiFunction<Object, Object, Boolean> process, String symbol) { - this.process = process; - this.symbol = symbol; - } - - @Override - public String symbol() { - return symbol; - } - - @Override - public Boolean apply(Object left, Object right) { - if (this != NULLEQ && (left == null || right == null)) { - return null; - } - return doApply(left, right); - } - - @Override - public final Boolean doApply(Object left, Object right) { - return process.apply(left, right); - } - - @Override - public String toString() { - return symbol; - } - } - - public static final String NAME = "cb"; - - public BinaryComparisonProcessor(Processor left, Processor right, BinaryComparisonOperation operation) { - super(left, right, operation); - } - - public BinaryComparisonProcessor(StreamInput in) throws IOException { - super(in, i -> i.readEnum(BinaryComparisonOperation.class)); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public Object process(Object input) { - if (function() == BinaryComparisonOperation.NULLEQ) { - return doProcess(left().process(input), right().process(input)); - } - return super.process(input); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java index ba4816e3b68fe..96e174d9afa5d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java index 4e3880defdd79..cb165e42d0098 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java index 2132a028c4d79..2f0b3feeaf7d8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java index 21fbfa56b0d98..abbfaabd09ba2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java @@ -89,7 +89,20 @@ public Boolean fold() { if (Expressions.isNull(value) || list.size() == 1 && Expressions.isNull(list.get(0))) { return null; } - return InProcessor.apply(value.fold(), foldAndConvertListOfValues(list, value.dataType())); + return apply(value.fold(), foldAndConvertListOfValues(list, value.dataType())); + } + + private static Boolean apply(Object input, List<Object> values) { + Boolean result = Boolean.FALSE; + for (Object v : values) { + Boolean compResult = Comparisons.eq(input, v); + if (compResult == null) { + result = null; + } else if (compResult == Boolean.TRUE) { + return Boolean.TRUE; + } + } + return result; } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InProcessor.java deleted file mode 100644 index 61d33ab631bfb..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/InProcessor.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements.
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; - -public class InProcessor implements Processor { - - public static final String NAME = "in"; - - private final List processsors; - - InProcessor(List processors) { - this.processsors = processors; - } - - public InProcessor(StreamInput in) throws IOException { - processsors = in.readNamedWriteableCollectionAsList(Processor.class); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public final void writeTo(StreamOutput out) throws IOException { - out.writeNamedWriteableCollection(processsors); - } - - @Override - public Object process(Object input) { - Object leftValue = processsors.get(processsors.size() - 1).process(input); - return apply(leftValue, process(processsors.subList(0, processsors.size() - 1), leftValue)); - } - - private static List process(List processors, Object input) { - List values = new ArrayList<>(processors.size()); - for (Processor p : processors) { - values.add(p.process(input)); - } - return values; - } - - public static Boolean apply(Object input, List values) { - Boolean result = Boolean.FALSE; - for (Object v : values) { - Boolean compResult = Comparisons.eq(input, v); - if (compResult == null) { - result = null; - } else if (compResult == Boolean.TRUE) { - return Boolean.TRUE; - } - } - return result; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - InProcessor that = (InProcessor) o; - return Objects.equals(processsors, that.processsors); - } - - @Override - public int hashCode() { - return Objects.hash(processsors); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java index c7985548918f9..5f59e4abf8abf 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java index ff87d02cd654a..0b7f4c732c87d 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java index 936e684ab37c6..a4cfb8ca2e9b9 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java index 0b135d380f621..a647cdeb5ca20 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java @@ -8,7 +8,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java index 4e7e70685dc3a..32e8b04573d2d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexMatch.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; @@ -69,12 +68,7 @@ public Boolean fold() { if (val instanceof BytesRef br) { val = br.utf8ToString(); } - return RegexProcessor.RegexOperation.match(val, pattern().asJavaRegex()); - } - - @Override - protected Processor makeProcessor() { - return new RegexProcessor(pattern().asJavaRegex()); + return RegexOperation.match(val, pattern().asJavaRegex()); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexOperation.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexOperation.java new file mode 100644 index 0000000000000..1501ae65ed485 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexOperation.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.expression.predicate.regex; + +import java.util.regex.Pattern; + +public class RegexOperation { + + public static Boolean match(Object value, Pattern pattern) { + if (pattern == null) { + return Boolean.TRUE; + } + + if (value == null) { + return null; + } + + return pattern.matcher(value.toString()).matches(); + } + + public static Boolean match(Object value, String pattern) { + return match(value, pattern, Boolean.FALSE); + } + + public static Boolean match(Object value, String pattern, Boolean caseInsensitive) { + if (pattern == null) { + return Boolean.TRUE; + } + + if (value == null) { + return null; + } + + int flags = 0; + if (Boolean.TRUE.equals(caseInsensitive)) { + flags |= Pattern.CASE_INSENSITIVE; + } + return Pattern.compile(pattern, flags).matcher(value.toString()).matches(); + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexProcessor.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexProcessor.java deleted file mode 100644 index 41b0ab406bf89..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RegexProcessor.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.regex; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; - -import java.io.IOException; -import java.util.Objects; -import java.util.regex.Pattern; - -public class RegexProcessor implements Processor { - - public static class RegexOperation { - - public static Boolean match(Object value, Pattern pattern) { - if (pattern == null) { - return Boolean.TRUE; - } - - if (value == null) { - return null; - } - - return pattern.matcher(value.toString()).matches(); - } - - public static Boolean match(Object value, String pattern) { - return match(value, pattern, Boolean.FALSE); - } - - public static Boolean match(Object value, String pattern, Boolean caseInsensitive) { - if (pattern == null) { - return Boolean.TRUE; - } - - if (value == null) { - return null; - } - - int flags = 0; - if (Boolean.TRUE.equals(caseInsensitive)) { - flags |= Pattern.CASE_INSENSITIVE; - } - return Pattern.compile(pattern, flags).matcher(value.toString()).matches(); - } - } - - public static final String NAME = "rgx"; - - private Pattern pattern; - - public RegexProcessor(String pattern) { - this.pattern = pattern != null ? Pattern.compile(pattern) : null; - } - - @Override - public String getWriteableName() { - return NAME; - } - - public RegexProcessor(StreamInput in) throws IOException { - this(in.readOptionalString()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalString(pattern != null ? pattern.toString() : null); - } - - @Override - public Object process(Object input) { - return RegexOperation.match(input, pattern); - } - - @Override - public int hashCode() { - return Objects.hash(pattern); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - RegexProcessor other = (RegexProcessor) obj; - return Objects.equals(pattern, other.pattern); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/processor/Processors.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/processor/Processors.java deleted file mode 100644 index e47b80ee0ab59..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/processor/Processors.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.processor; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ChainingProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessor; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.NotProcessor; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.BinaryArithmeticOperation; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.DefaultBinaryArithmeticOperation; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.UnaryArithmeticProcessor; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexProcessor; -import org.elasticsearch.xpack.esql.core.type.Converter; -import org.elasticsearch.xpack.esql.core.type.DataTypeConverter.DefaultConverter; - -import java.util.ArrayList; -import java.util.List; - -public final class Processors { - - private Processors() {} - - /** - * All of the named writeables needed to deserialize the instances of - * {@linkplain Processors}. - */ - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - - // base - entries.add(new Entry(Converter.class, DefaultConverter.NAME, DefaultConverter::read)); - - entries.add(new Entry(Processor.class, ConstantProcessor.NAME, ConstantProcessor::new)); - entries.add(new Entry(Processor.class, ChainingProcessor.NAME, ChainingProcessor::new)); - - // logical - entries.add(new Entry(Processor.class, BinaryLogicProcessor.NAME, BinaryLogicProcessor::new)); - entries.add(new Entry(Processor.class, NotProcessor.NAME, NotProcessor::new)); - - // arithmetic - // binary arithmetics are pluggable - entries.add( - new Entry(BinaryArithmeticOperation.class, DefaultBinaryArithmeticOperation.NAME, DefaultBinaryArithmeticOperation::read) - ); - entries.add(new Entry(Processor.class, BinaryArithmeticProcessor.NAME, BinaryArithmeticProcessor::new)); - entries.add(new Entry(Processor.class, UnaryArithmeticProcessor.NAME, UnaryArithmeticProcessor::new)); - // comparators - entries.add(new Entry(Processor.class, BinaryComparisonProcessor.NAME, BinaryComparisonProcessor::new)); - // regex - entries.add(new Entry(Processor.class, RegexProcessor.NAME, RegexProcessor::new)); - - return entries; - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java deleted file mode 100644 index f7bbbd9f61189..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ChainingProcessorTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogicProcessorTests; -import org.elasticsearch.xpack.esql.core.expression.processor.Processors; - -import java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; - -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/109012") -public class ChainingProcessorTests extends AbstractWireSerializingTestCase { - public static ChainingProcessor randomComposeProcessor() { - return new ChainingProcessor(randomProcessor(), randomProcessor()); - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Processors.getNamedWriteables()); - } - - @Override - protected ChainingProcessor createTestInstance() { - return randomComposeProcessor(); - } - - @Override - protected Reader instanceReader() { - return ChainingProcessor::new; - } - - @Override - protected ChainingProcessor mutateInstance(ChainingProcessor instance) { - @SuppressWarnings("unchecked") - Supplier supplier = randomFrom( - () -> new ChainingProcessor(instance.first(), randomValueOtherThan(instance.second(), () -> randomProcessor())), - () -> new ChainingProcessor(randomValueOtherThan(instance.first(), () -> randomProcessor()), instance.second()) - ); - return supplier.get(); - } - - public static Processor randomProcessor() { - List> options = new ArrayList<>(); - options.add(ChainingProcessorTests::randomComposeProcessor); - options.add(BinaryLogicProcessorTests::randomProcessor); - return randomFrom(options).get(); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessorTests.java deleted file mode 100644 index 00ca460920d03..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/gen/processor/ConstantProcessorTests.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.gen.processor; - -import org.elasticsearch.common.io.stream.ByteArrayStreamInput; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.versionfield.Version; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.time.Clock; -import java.time.Duration; -import java.time.ZonedDateTime; - -public class ConstantProcessorTests extends AbstractWireSerializingTestCase { - - public static ConstantProcessor randomConstantProcessor() { - if (randomBoolean()) { - Clock clock = Clock.tickMillis(randomZone()); - if (randomBoolean()) { - clock = Clock.tick(clock, Duration.ofNanos(1)); - } - return new ConstantProcessor(ZonedDateTime.now(clock)); - } else { - return new ConstantProcessor(randomAlphaOfLength(5)); - } - } - - @Override - protected ConstantProcessor createTestInstance() { - return randomConstantProcessor(); - } - - @Override - protected Reader instanceReader() { - return ConstantProcessor::new; - } - - @Override - protected ConstantProcessor mutateInstance(ConstantProcessor instance) { - return new ConstantProcessor(randomValueOtherThan(instance.process(null), () -> randomLong())); - } - - public void testApply() { - ConstantProcessor proc = new ConstantProcessor("test"); - assertEquals("test", proc.process(null)); - assertEquals("test", proc.process("cat")); - } - - public void testReadWriteVersion() throws IOException { - ConstantProcessor original = new ConstantProcessor(new Version("1.2.3")); - try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); StreamOutput out = new OutputStreamStreamOutput(baos)) { - original.writeTo(out); - try (StreamInput is = new ByteArrayStreamInput(baos.toByteArray())) { - ConstantProcessor result = new ConstantProcessor(is); - assertEquals(Version.class, result.process(null).getClass()); - assertEquals("1.2.3", result.process(null).toString()); - } - } - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperationTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperationTests.java new file mode 100644 index 0000000000000..05279b74f6382 --- /dev/null +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperationTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.esql.core.expression.predicate.logical; + +import org.elasticsearch.test.ESTestCase; + +public class BinaryLogicOperationTests extends ESTestCase { + + public void testOR() { + assertEquals(true, BinaryLogicOperation.OR.apply(true, false)); + assertEquals(true, BinaryLogicOperation.OR.apply(false, true)); + assertEquals(false, BinaryLogicOperation.OR.apply(false, false)); + assertEquals(true, BinaryLogicOperation.OR.apply(true, true)); + } + + public void testORNullHandling() { + assertEquals(true, BinaryLogicOperation.OR.apply(true, null)); + assertEquals(true, BinaryLogicOperation.OR.apply(null, true)); + assertNull(BinaryLogicOperation.OR.apply(false, null)); + assertNull(BinaryLogicOperation.OR.apply(null, false)); + assertNull(BinaryLogicOperation.OR.apply(null, null)); + } + + public void testAnd() { + assertEquals(false, BinaryLogicOperation.AND.apply(true, false)); + assertEquals(false, BinaryLogicOperation.AND.apply(false, true)); + assertEquals(false, BinaryLogicOperation.AND.apply(false, false)); + assertEquals(true, BinaryLogicOperation.AND.apply(true, true)); + } + + public void testAndNullHandling() { + assertNull(BinaryLogicOperation.AND.apply(true, null)); + assertNull(BinaryLogicOperation.AND.apply(null, true)); + assertEquals(false, BinaryLogicOperation.AND.apply(false, null)); + assertEquals(false, BinaryLogicOperation.AND.apply(null, false)); + assertNull(BinaryLogicOperation.AND.apply(null, null)); + } +} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessorTests.java deleted file mode 100644 index 83a9ca0a8ee3d..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicProcessorTests.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.expression.processor.Processors; - -public class BinaryLogicProcessorTests extends AbstractWireSerializingTestCase { - - private static final Processor FALSE = new ConstantProcessor(false); - private static final Processor TRUE = new ConstantProcessor(true); - private static final Processor NULL = new ConstantProcessor((Object) null); - - public static BinaryLogicProcessor randomProcessor() { - return new BinaryLogicProcessor( - new ConstantProcessor(randomFrom(Boolean.FALSE, Boolean.TRUE, null)), - new ConstantProcessor(randomFrom(Boolean.FALSE, Boolean.TRUE, null)), - randomFrom(BinaryLogicProcessor.BinaryLogicOperation.values()) - ); - } - - @Override - protected BinaryLogicProcessor createTestInstance() { - return randomProcessor(); - } - - @Override - protected BinaryLogicProcessor mutateInstance(BinaryLogicProcessor instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Reader instanceReader() { - return BinaryLogicProcessor::new; - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Processors.getNamedWriteables()); - } - - public void testOR() { - assertEquals(true, new BinaryLogicProcessor(TRUE, FALSE, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - assertEquals(true, new BinaryLogicProcessor(FALSE, TRUE, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - assertEquals(false, new BinaryLogicProcessor(FALSE, FALSE, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - assertEquals(true, new BinaryLogicProcessor(TRUE, TRUE, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - } - - public void testORNullHandling() { - assertEquals(true, new BinaryLogicProcessor(TRUE, NULL, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - assertEquals(true, new BinaryLogicProcessor(NULL, TRUE, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - assertNull(new BinaryLogicProcessor(FALSE, NULL, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - assertNull(new BinaryLogicProcessor(NULL, FALSE, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - assertNull(new BinaryLogicProcessor(NULL, NULL, BinaryLogicProcessor.BinaryLogicOperation.OR).process(null)); - } - - public void testAnd() { - assertEquals(false, new BinaryLogicProcessor(TRUE, FALSE, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - assertEquals(false, new BinaryLogicProcessor(FALSE, TRUE, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - assertEquals(false, new BinaryLogicProcessor(FALSE, FALSE, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - assertEquals(true, new BinaryLogicProcessor(TRUE, TRUE, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - } - - public void testAndNullHandling() { - assertNull(new BinaryLogicProcessor(TRUE, NULL, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - assertNull(new BinaryLogicProcessor(NULL, TRUE, 
BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - assertEquals(false, new BinaryLogicProcessor(FALSE, NULL, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - assertEquals(false, new BinaryLogicProcessor(NULL, FALSE, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - assertNull(new BinaryLogicProcessor(NULL, NULL, BinaryLogicProcessor.BinaryLogicOperation.AND).process(null)); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessorTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessorTests.java deleted file mode 100644 index 69104c7601f6a..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/CheckNullProcessorTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.nulls; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; -import org.elasticsearch.xpack.esql.core.expression.processor.Processors; - -public class CheckNullProcessorTests extends AbstractWireSerializingTestCase { - - private static final Processor FALSE = new ConstantProcessor(false); - private static final Processor TRUE = new ConstantProcessor(true); - private static final Processor NULL = new ConstantProcessor((Object) null); - - public static CheckNullProcessor randomProcessor() { - return new CheckNullProcessor(randomFrom(CheckNullProcessor.CheckNullOperation.values())); - } - - @Override - protected CheckNullProcessor createTestInstance() { - return randomProcessor(); - } - - @Override - protected CheckNullProcessor mutateInstance(CheckNullProcessor instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Reader instanceReader() { - return CheckNullProcessor::new; - } - - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Processors.getNamedWriteables()); - } - - public void testIsNull() { - assertEquals(true, new CheckNullProcessor(CheckNullProcessor.CheckNullOperation.IS_NULL).process(null)); - assertEquals(false, new CheckNullProcessor(CheckNullProcessor.CheckNullOperation.IS_NULL).process("foo")); - assertEquals(false, new CheckNullProcessor(CheckNullProcessor.CheckNullOperation.IS_NULL).process(1)); - } - - public void testIsNotNull() { - assertEquals(false, new CheckNullProcessor(CheckNullProcessor.CheckNullOperation.IS_NOT_NULL).process(null)); - assertEquals(true, new CheckNullProcessor(CheckNullProcessor.CheckNullOperation.IS_NOT_NULL).process("foo")); - assertEquals(true, new CheckNullProcessor(CheckNullProcessor.CheckNullOperation.IS_NOT_NULL).process(1)); - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java 
b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java index 8d424f8694b97..fae5e349712df 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java @@ -17,8 +17,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttributeTests; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.esql.core.expression.gen.processor.Processor; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; @@ -447,14 +445,6 @@ private Object makeArg(Class> toBuildClass, Type argType) thro return new EnrichPolicy(randomFrom("match", "range"), null, List.of(), randomFrom("m1", "m2"), enrichFields); } - if (Processor.class == argClass) { - /* - * Similar to expressions, mock pipes to avoid - * stackoverflow errors while building the tree. - */ - return new ConstantProcessor(randomAlphaOfLength(16)); - } - if (Node.class.isAssignableFrom(argClass)) { /* * Rather than attempting to mock subclasses of node diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index a4559e10eaf3a..a4eea2d676aad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -51,24 +50,44 @@ public interface BinaryOperatorConstructor { public enum BinaryComparisonOperation implements Writeable { - EQ(0, "==", BinaryComparisonProcessor.BinaryComparisonOperation.EQ, Equals::new), + EQ(0, "==", org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.EQ, Equals::new), // id 1 reserved for NullEquals - NEQ(2, "!=", BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, NotEquals::new), - GT(3, ">", BinaryComparisonProcessor.BinaryComparisonOperation.GT, GreaterThan::new), - GTE(4, ">=", BinaryComparisonProcessor.BinaryComparisonOperation.GTE, GreaterThanOrEqual::new), - LT(5, "<", BinaryComparisonProcessor.BinaryComparisonOperation.LT, LessThan::new), - LTE(6, "<=", BinaryComparisonProcessor.BinaryComparisonOperation.LTE, LessThanOrEqual::new); + NEQ( + 2, + "!=", + 
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.NEQ, + NotEquals::new + ), + GT( + 3, + ">", + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.GT, + GreaterThan::new + ), + GTE( + 4, + ">=", + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.GTE, + GreaterThanOrEqual::new + ), + LT(5, "<", org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.LT, LessThan::new), + LTE( + 6, + "<=", + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.LTE, + LessThanOrEqual::new + ); private final int id; private final String symbol; // Temporary mapping to the old enum, to satisfy the superclass constructor signature. - private final BinaryComparisonProcessor.BinaryComparisonOperation shim; + private final org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation shim; private final BinaryOperatorConstructor constructor; BinaryComparisonOperation( int id, String symbol, - BinaryComparisonProcessor.BinaryComparisonOperation shim, + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation shim, BinaryOperatorConstructor constructor ) { this.id = id; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index a2024f9e9e7e4..924c483717d16 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.InProcessor; +import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Comparisons; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -90,7 +90,20 @@ public Boolean fold() { // QL's `In` fold() doesn't handle BytesRef and can't know if this is Keyword/Text, Version or IP anyway. // `In` allows comparisons of same type only (safe for numerics), so it's safe to apply InProcessor directly with no implicit // (non-numerical) conversions. 
- return InProcessor.apply(value().fold(), list().stream().map(Expression::fold).toList()); + return apply(value().fold(), list().stream().map(Expression::fold).toList()); + } + + private static Boolean apply(Object input, List values) { + Boolean result = Boolean.FALSE; + for (Object v : values) { + Boolean compResult = Comparisons.eq(input, v); + if (compResult == null) { + result = null; + } else if (compResult == Boolean.TRUE) { + return Boolean.TRUE; + } + } + return result; } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparisonTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparisonTests.java index cc282186d4385..540a5fafbae06 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparisonTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparisonTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonProcessor; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison.BinaryComparisonOperation; import java.io.IOException; @@ -40,16 +39,34 @@ public void testSerializationOfBinaryComparisonOperation() throws IOException { * {@link BinaryComparisonOperation} */ public void testCompatibleWithQLBinaryComparisonOperation() throws IOException { - validateCompatibility(BinaryComparisonProcessor.BinaryComparisonOperation.EQ, BinaryComparisonOperation.EQ); - validateCompatibility(BinaryComparisonProcessor.BinaryComparisonOperation.NEQ, BinaryComparisonOperation.NEQ); - validateCompatibility(BinaryComparisonProcessor.BinaryComparisonOperation.GT, BinaryComparisonOperation.GT); - validateCompatibility(BinaryComparisonProcessor.BinaryComparisonOperation.GTE, BinaryComparisonOperation.GTE); - validateCompatibility(BinaryComparisonProcessor.BinaryComparisonOperation.LT, BinaryComparisonOperation.LT); - validateCompatibility(BinaryComparisonProcessor.BinaryComparisonOperation.LTE, BinaryComparisonOperation.LTE); + validateCompatibility( + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.EQ, + BinaryComparisonOperation.EQ + ); + validateCompatibility( + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.NEQ, + BinaryComparisonOperation.NEQ + ); + validateCompatibility( + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.GT, + BinaryComparisonOperation.GT + ); + validateCompatibility( + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.GTE, + BinaryComparisonOperation.GTE + ); + validateCompatibility( + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.LT, + BinaryComparisonOperation.LT + ); + validateCompatibility( + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation.LTE, + BinaryComparisonOperation.LTE + ); } private static void validateCompatibility( - BinaryComparisonProcessor.BinaryComparisonOperation 
original, + org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparisonOperation original, BinaryComparisonOperation expected ) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { From b5ec38e75ddafcf0584609c36afa87145a4ab0dc Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Mon, 1 Jul 2024 17:21:25 +0200 Subject: [PATCH 089/216] Fix assertion error when using passthrough with contradicting fields (#110326) I've added a yml rest test that reproduces an assertion error when using the `passthrough` field type with conflicts on multiple levels with a top-level `match_only_text` field. ``` [2024-07-01T03:48:58,737][ERROR][o.e.b.ElasticsearchUncaughtExceptionHandler] [test-cluster-1] fatal error in thread [elasticsearch[test-cluster-1][write][T#5]], exiting java.lang.AssertionError: Field body should not have docvalues at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.FieldNamesFieldMapper.addFieldNames(FieldNamesFieldMapper.java:177) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentParserContext.addToFieldNames(DocumentParserContext.java:292) at org.elasticsearch.mapper.extras@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.extras.MatchOnlyTextFieldMapper.parseCreateField(MatchOnlyTextFieldMapper.java:421) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:185) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:450) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:775) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:368) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:319) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:143) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:88) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:112) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.shard.IndexShard.prepareIndex(IndexShard.java:1038) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.shard.IndexShard.applyIndexOperation(IndexShard.java:979) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.index.shard.IndexShard.applyIndexOperationOnPrimary(IndexShard.java:923) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.action.bulk.TransportShardBulkAction.executeBulkItemRequest(TransportShardBulkAction.java:376) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.action.bulk.TransportShardBulkAction$2.doRun(TransportShardBulkAction.java:234) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.common.util.concurrent.TimedRunnable.doRun(TimedRunnable.java:33) at org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:984) at 
org.elasticsearch.server@8.15.0-SNAPSHOT/org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) at java.base/java.lang.Thread.run(Thread.java:1570) ``` When changing the top-level field to a different field type, for example, `keyword` or `text`, the assertion error disappears, but the ordering is not quite right. Note that sometimes the tests pass and sometimes they fail with the following message: `Expected a list containing 0: expected "top-level" but was "resource"`. Seems like in certain cases, the passthrough field type overrides the top-level field. I'm not sure if the two issues are related or separate. This is a dump of the search: ```json [{ "stash" : { "body" : { "took" : 35, "timed_out" : false, "_shards" : { "total" : 1, "successful" : 1, "skipped" : 0, "failed" : 0 }, "hits" : { "total" : { "value" : 1, "relation" : "eq" }, "max_score" : 1.0, "hits" : [ { "_index" : ".ds-otel-2024.07.01-000001", "_id" : "IHWS-EudCPGf0ayGAAABilDXF2o", "_score" : 1.0, "_source" : { "@timestamp" : "2023-09-01T13:03:08.138Z", "attributes" : { "body" : "attribute" }, "body" : "top-level", "metrics" : { "data" : 10 }, "resource" : { "attributes" : { "body" : "resource" } }, "scope" : { "attributes" : { "body" : "scope" } } }, "fields" : { "metrics.data" : [ 10 ], "@timestamp" : [ "2023-09-01T13:03:08.138Z" ], "data" : [ 10 ], "scope.attributes.body" : [ "scope" ], "attributes.body" : [ "attribute" ], "resource.attributes.body" : [ "resource" ], "body" : [ "resource" ] } } ] } } } }] ``` --- .../test/data_stream/150_tsdb.yml | 83 +++++++++++++++++++ .../index/mapper/FieldTypeLookup.java | 25 ++++-- .../index/mapper/FieldTypeLookupTests.java | 23 ++++- 3 files changed, 119 insertions(+), 12 deletions(-) diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index a1ded40ce1852..0f7752bd43bd2 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -423,6 +423,89 @@ dynamic templates - conflicting aliases: - match: { aggregations.filterA.tsids.buckets.0.key: "KGejYryCnrIkXYZdIF_Q8F8X2dfFIGKYisFh7t1RGGWOWgWU7C0RiFE" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } +--- +dynamic templates - conflicting aliases with top-level field: + - requires: + cluster_features: ["mapper.pass_through_priority"] + reason: support for priority in passthrough objects + - do: + allowed_warnings: + - "index template [my-dynamic-template] has index patterns [otel] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" + indices.put_index_template: + name: my-dynamic-template + body: + index_patterns: [otel] + data_stream: {} + template: + settings: + index: + number_of_shards: 1 + mode: time_series + time_series: + start_time: 2023-08-31T13:03:08.138Z + + mappings: + properties: + body: + type: match_only_text + attributes: + type: passthrough + dynamic: true + time_series_dimension: true + priority: 1 + scope: + properties: + attributes: + type: passthrough + dynamic: true + 
time_series_dimension: true + priority: 2 + resource: + properties: + attributes: + type: passthrough + dynamic: true + time_series_dimension: true + priority: 3 + metrics: + type: passthrough + dynamic: true + priority: 0 + dynamic_templates: + - counter_metric: + mapping: + type: integer + time_series_metric: counter + ignore_malformed: true + - strings_as_keyword: + mapping: + type: keyword + ignore_above: 1024 + match_mapping_type: string + path_match: "*attributes.*" + + - do: + bulk: + index: otel + refresh: true + body: + - '{ "create": { "dynamic_templates": { "metrics.data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "metrics": {"data": "10"}, "body": "top-level", "attributes": {"body": "attribute"}, "scope": {"attributes": {"body": "scope" }}, "resource": {"attributes": {"body": "resource" }}}' + - match: { errors: false } + + - do: + search: + index: otel + body: + size: 1 + fields: ["*"] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.fields.body: [ top-level ] } + - match: { hits.hits.0.fields.attributes\.body: [ attribute ] } + - match: { hits.hits.0.fields.scope\.attributes\.body: [ scope ] } + - match: { hits.hits.0.fields.resource\.attributes\.body: [ resource ] } + --- dynamic templates with nesting: - requires: diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index 7bfb65c52e193..65ee587d8cb50 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -106,21 +106,28 @@ final class FieldTypeLookup { if (conflict.priority() > passThroughMapper.priority()) { // Keep the conflicting field if it has higher priority. passThroughFieldAliases.put(name, conflict); - continue; } - } else if (fullNameToFieldType.containsKey(name)) { - // There's an existing field or alias for the same field. - continue; - } - MappedFieldType fieldType = fieldMapper.fieldType(); - fullNameToFieldType.put(name, fieldType); - if (fieldType instanceof DynamicFieldType) { - dynamicFieldTypes.put(name, (DynamicFieldType) fieldType); } } } } + for (Map.Entry entry : passThroughFieldAliases.entrySet()) { + String name = entry.getKey(); + if (fullNameToFieldType.containsKey(name)) { + // There's an existing field or alias for the same field. 
+ continue; + } + Mapper mapper = entry.getValue().getMapper(name); + if (mapper instanceof FieldMapper fieldMapper) { + MappedFieldType fieldType = fieldMapper.fieldType(); + fullNameToFieldType.put(name, fieldType); + if (fieldType instanceof DynamicFieldType) { + dynamicFieldTypes.put(name, (DynamicFieldType) fieldType); + } + } + } + for (MappedFieldType fieldType : RuntimeField.collectFieldTypes(runtimeFields).values()) { // this will override concrete fields with runtime fields that have the same name fullNameToFieldType.put(fieldType.name(), fieldType); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java index 04013bf01d57c..ad8f2c9f4f8af 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -493,9 +494,9 @@ public void testAddRootAliasForConflictingPassThroughFields() { ); FieldTypeLookup lookup = new FieldTypeLookup( - List.of(attributeField, resourceAttributeField), + randomizedList(attributeField, resourceAttributeField), List.of(), - List.of(attributes, resourceAttributes), + randomizedList(attributes, resourceAttributes), List.of() ); assertEquals(attributeField.fieldType(), lookup.get("foo")); @@ -503,10 +504,26 @@ public void testAddRootAliasForConflictingPassThroughFields() { public void testNoRootAliasForPassThroughFieldOnConflictingField() { MockFieldMapper attributeFoo = new MockFieldMapper("attributes.foo"); + MockFieldMapper resourceAttributeFoo = new MockFieldMapper("resource.attributes.foo"); MockFieldMapper foo = new MockFieldMapper("foo"); PassThroughObjectMapper attributes = createPassThroughMapper("attributes", Map.of("foo", attributeFoo), 0); + PassThroughObjectMapper resourceAttributes = createPassThroughMapper("resource.attributes", Map.of("foo", resourceAttributeFoo), 1); + + FieldTypeLookup lookup = new FieldTypeLookup( + randomizedList(foo, attributeFoo, resourceAttributeFoo), + List.of(), + randomizedList(attributes, resourceAttributes), + List.of() + ); - FieldTypeLookup lookup = new FieldTypeLookup(List.of(foo, attributeFoo), List.of(), List.of(attributes), List.of()); assertEquals(foo.fieldType(), lookup.get("foo")); } + + @SafeVarargs + @SuppressWarnings("varargs") + static List randomizedList(T... 
values) { + ArrayList list = new ArrayList<>(Arrays.asList(values)); + Collections.shuffle(list, random()); + return list; + } } From 7c2b852f65903cb635bb41ea224378c51ec082ae Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 1 Jul 2024 18:43:23 +0200 Subject: [PATCH 090/216] Remove semantic_text feature flag (#110338) --- docs/changelog/110338.yaml | 5 ++++ .../test/cluster/FeatureFlag.java | 3 +-- x-pack/plugin/inference/build.gradle | 6 ----- .../xpack/inference/InferencePlugin.java | 15 +++--------- .../xpack/inference/SemanticTextFeature.java | 24 ------------------- .../xpack/inference/InferenceRestIT.java | 2 -- 6 files changed, 9 insertions(+), 46 deletions(-) create mode 100644 docs/changelog/110338.yaml delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/SemanticTextFeature.java diff --git a/docs/changelog/110338.yaml b/docs/changelog/110338.yaml new file mode 100644 index 0000000000000..2334a1cbc9283 --- /dev/null +++ b/docs/changelog/110338.yaml @@ -0,0 +1,5 @@ +pr: 110338 +summary: Add `semantic_text` field type and `semantic` query +area: Mapping +type: feature +issues: [] diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index d555337f467ae..49fb38b518dce 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,8 +16,7 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - SEMANTIC_TEXT_ENABLED("es.semantic_text_feature_flag_enabled=true", Version.fromString("8.15.0"), null); + FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 92afa3faa51e3..41ca9966c1336 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -203,12 +203,6 @@ tasks.named("thirdPartyAudit").configure { ) } -if (BuildParams.isSnapshotBuild() == false) { - tasks.withType(Test).configureEach { - systemProperty 'es.semantic_text_feature_flag_enabled', 'true' - } -} - tasks.named('yamlRestTest') { usesDefaultDistribution() } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 8ab9f774898b6..1db5b4135ee94 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -294,25 +294,16 @@ public void close() { @Override public Map getMappers() { - if (SemanticTextFeature.isEnabled()) { - return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER); - } - return Map.of(); + return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER); } @Override public Collection getMappedActionFilters() { - if (SemanticTextFeature.isEnabled()) { - return singletonList(shardBulkInferenceActionFilter.get()); - } - return 
List.of(); + return singletonList(shardBulkInferenceActionFilter.get()); } public List<QuerySpec<?>> getQueries() { - if (SemanticTextFeature.isEnabled()) { - return List.of(new QuerySpec<>(SemanticQueryBuilder.NAME, SemanticQueryBuilder::new, SemanticQueryBuilder::fromXContent)); - } - return List.of(); + return List.of(new QuerySpec<>(SemanticQueryBuilder.NAME, SemanticQueryBuilder::new, SemanticQueryBuilder::fromXContent)); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/SemanticTextFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/SemanticTextFeature.java deleted file mode 100644 index 4f2c5c564bcb8..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/SemanticTextFeature.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * semantic_text feature flag. When the feature is complete, this flag will be removed. - */ -public class SemanticTextFeature { - - private SemanticTextFeature() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("semantic_text"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index c84fdd871f857..701bcd204fcfe 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; @@ -24,7 +23,6 @@ public class InferenceRestIT extends ESClientYamlSuiteTestCase { .setting("xpack.security.http.ssl.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .plugin("inference-service-test") - .feature(FeatureFlag.SEMANTIC_TEXT_ENABLED) .distribution(DistributionType.DEFAULT) .build(); From 01b7ccd4d11005793351c427cb043acd051ffa87 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 1 Jul 2024 13:36:19 -0400 Subject: [PATCH 091/216] ESQL: Reenable test (#110305) We have a security test that fails once every thousand or so runs because it runs an async esql action and *sometimes* it requests the async result and the index does not yet exist. This retries. I think there's probably a better solution, but for now I'm going to fix the test and open an issue to track that.
Closes #109806 --- muted-tests.yml | 3 -- .../xpack/esql/EsqlAsyncSecurityIT.java | 51 +++++++++++++++---- .../xpack/esql/EsqlSecurityIT.java | 1 + 3 files changed, 41 insertions(+), 14 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 4d447bcab7dcb..e8c6536b0ce0e 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -47,9 +47,6 @@ tests: - class: "org.elasticsearch.xpack.test.rest.XPackRestIT" issue: "https://github.com/elastic/elasticsearch/issues/109687" method: "test {p0=sql/translate/Translate SQL}" -- class: "org.elasticsearch.xpack.esql.EsqlAsyncSecurityIT" - issue: "https://github.com/elastic/elasticsearch/issues/109806" - method: "testInsufficientPrivilege" - class: org.elasticsearch.action.search.SearchProgressActionListenerIT method: testSearchProgressWithHits issue: https://github.com/elastic/elasticsearch/issues/109830 diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java index 544eb82fb5ace..443813442165a 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlAsyncSecurityIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql; +import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -36,7 +37,8 @@ protected Response runESQLCommand(String user, String command) throws IOExceptio var respMap = entityAsMap(response.getEntity()); String id = (String) respMap.get("id"); assertThat((boolean) respMap.get("is_running"), either(is(true)).or(is(false))); - var getResponse = runAsyncGet(user, id); + int tries = 0; + Response getResponse = runAsyncGet(user, id); assertOK(getResponse); var deleteResponse = runAsyncDelete(user, id); assertOK(deleteResponse); @@ -98,6 +100,7 @@ private Response runAsync(String user, String command) throws IOException { Request request = new Request("POST", "_query/async"); request.setJsonEntity(Strings.toString(json)); request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); + request.addParameter("error_trace", "true"); logRequest(request); Response response = client().performRequest(request); logResponse(response); @@ -105,19 +108,45 @@ private Response runAsync(String user, String command) throws IOException { } private Response runAsyncGet(String user, String id) throws IOException { - Request getRequest = new Request("GET", "_query/async/" + id + "?wait_for_completion_timeout=60s"); - getRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); - logRequest(getRequest); - var response = client().performRequest(getRequest); - logResponse(response); - return response; + int tries = 0; + while (tries < 10) { + // Sometimes we get 404s fetching the task status. 
+            try {
+                Request getRequest = new Request("GET", "_query/async/" + id + "?wait_for_completion_timeout=60s");
+                getRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user));
+                getRequest.addParameter("error_trace", "true");
+                logRequest(getRequest);
+                var response = client().performRequest(getRequest);
+                logResponse(response);
+                return response;
+            } catch (ResponseException e) {
+                if (e.getResponse().getStatusLine().getStatusCode() == 404
+                    && EntityUtils.toString(e.getResponse().getEntity()).contains("no such index [.async-search]")) {
+                    /*
+                     * Work around https://github.com/elastic/elasticsearch/issues/110304 - the .async-search
+                     * index may not exist when we try the fetch, but it should exist on next attempt.
+                     */
+                    logger.warn("async-search index does not exist", e);
+                    try {
+                        Thread.sleep(1000);
+                    } catch (InterruptedException ex) {
+                        throw new RuntimeException(ex);
+                    }
+                } else {
+                    throw e;
+                }
+                tries++;
+            }
+        }
+        throw new IllegalStateException("couldn't find task status");
     }
 
     private Response runAsyncDelete(String user, String id) throws IOException {
-        Request getRequest = new Request("DELETE", "_query/async/" + id);
-        getRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user));
-        logRequest(getRequest);
-        var response = client().performRequest(getRequest);
+        Request deleteRequest = new Request("DELETE", "_query/async/" + id);
+        deleteRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user));
+        deleteRequest.addParameter("error_trace", "true");
+        logRequest(deleteRequest);
+        var response = client().performRequest(deleteRequest);
         logResponse(response);
         return response;
     }
diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java
index d7e146cd6d7c1..faa2eb9bd82b0 100644
--- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java
+++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java
@@ -142,6 +142,7 @@ public void testInsufficientPrivilege() {
             Exception.class,
             () -> runESQLCommand("metadata1_read2", "FROM index-user1,index-user2 | STATS sum=sum(value)")
         );
+        logger.info("error", error);
         assertThat(
             error.getMessage(),
             containsString(

From 3b827f6a8c73f55fcfb6c46999d19d2294f00ecd Mon Sep 17 00:00:00 2001
From: Jedr Blaszyk
Date: Mon, 1 Jul 2024 19:41:28 +0200
Subject: [PATCH 092/216] Create `manage_connector` privilege (#110128)

* Create manage_search_connector privilege

* `manage_search_connector` -> `manage_connector` and exclude connector secrets patterns from this privilege

* Add `monitor_connector` privilege

* Update Kibana system privilege to monitor_connector for telemetry

* Rename privilege to 'manage_connector_state'

Since privilege names are often namespaced and used with globs, we want to ensure that if there's a future privilege like `manage_connector_secrets`, that it is not implicitly included in this new privilege's *. By extending the privilege name to include "_state", we better namespace this distinct from any "_secrets" namespace.

* Revert "Rename privilege to 'manage_connector_state'"

This reverts commit 70b89eee76cb9a03ac7caec3fe7927be4b6e11c3.
After further discussion with the security team, this name change is not needed after all since the secret management privileges aren't currently prefixed with "manage_" --------- Co-authored-by: Sean Story --- .../security/get-builtin-privileges.asciidoc | 2 + .../privilege/ClusterPrivilegeResolver.java | 37 ++++++++++++++++--- .../KibanaOwnedReservedRoleDescriptors.java | 2 + .../test/privileges/11_builtin.yml | 2 +- 4 files changed, 36 insertions(+), 7 deletions(-) diff --git a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc index bbd0ca03c0473..8435f5539ab9d 100644 --- a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc +++ b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc @@ -77,6 +77,7 @@ A successful call returns an object with "cluster", "index", and "remote_cluster "manage_autoscaling", "manage_behavioral_analytics", "manage_ccr", + "manage_connector", "manage_data_frame_transforms", "manage_data_stream_global_retention", "manage_enrich", @@ -102,6 +103,7 @@ A successful call returns an object with "cluster", "index", and "remote_cluster "manage_user_profile", "manage_watcher", "monitor", + "monitor_connector", "monitor_data_frame_transforms", "monitor_data_stream_global_retention", "monitor_enrich", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 1b517bccf91c6..1cbe6c739a75f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -67,6 +67,7 @@ import java.util.SortedMap; import java.util.TreeMap; import java.util.function.Predicate; +import java.util.stream.Collectors; import java.util.stream.Stream; /** @@ -174,6 +175,23 @@ public class ClusterPrivilegeResolver { ); private static final Set MANAGE_SEARCH_APPLICATION_PATTERN = Set.of("cluster:admin/xpack/application/search_application/*"); + private static final Set MANAGE_CONNECTOR_PATTERN = Set.of("cluster:admin/xpack/connector/*"); + private static final Set MONITOR_CONNECTOR_PATTERN = Set.of( + "cluster:admin/xpack/connector/get", + "cluster:admin/xpack/connector/list", + "cluster:admin/xpack/connector/sync_job/get", + "cluster:admin/xpack/connector/sync_job/list" + ); + private static final Set READ_CONNECTOR_SECRETS_PATTERN = Set.of("cluster:admin/xpack/connector/secret/get"); + private static final Set WRITE_CONNECTOR_SECRETS_PATTERN = Set.of( + "cluster:admin/xpack/connector/secret/delete", + "cluster:admin/xpack/connector/secret/post", + "cluster:admin/xpack/connector/secret/put" + ); + private static final Set CONNECTOR_SECRETS_PATTERN = Stream.concat( + READ_CONNECTOR_SECRETS_PATTERN.stream(), + WRITE_CONNECTOR_SECRETS_PATTERN.stream() + ).collect(Collectors.toSet()); private static final Set MANAGE_SEARCH_QUERY_RULES_PATTERN = Set.of("cluster:admin/xpack/query_rules/*"); private static final Set MANAGE_SEARCH_SYNONYMS_PATTERN = Set.of( "cluster:admin/synonyms/*", @@ -332,6 +350,15 @@ public class ClusterPrivilegeResolver { "manage_search_application", MANAGE_SEARCH_APPLICATION_PATTERN ); + public static final NamedClusterPrivilege MANAGE_CONNECTOR = new ActionClusterPrivilege( + "manage_connector", + 
MANAGE_CONNECTOR_PATTERN, + CONNECTOR_SECRETS_PATTERN + ); + public static final NamedClusterPrivilege MONITOR_CONNECTOR = new ActionClusterPrivilege( + "monitor_connector", + MONITOR_CONNECTOR_PATTERN + ); public static final NamedClusterPrivilege MANAGE_SEARCH_SYNONYMS = new ActionClusterPrivilege( "manage_search_synonyms", MANAGE_SEARCH_SYNONYMS_PATTERN @@ -362,16 +389,12 @@ public class ClusterPrivilegeResolver { public static final NamedClusterPrivilege READ_CONNECTOR_SECRETS = new ActionClusterPrivilege( "read_connector_secrets", - Set.of("cluster:admin/xpack/connector/secret/get") + READ_CONNECTOR_SECRETS_PATTERN ); public static final NamedClusterPrivilege WRITE_CONNECTOR_SECRETS = new ActionClusterPrivilege( "write_connector_secrets", - Set.of( - "cluster:admin/xpack/connector/secret/delete", - "cluster:admin/xpack/connector/secret/post", - "cluster:admin/xpack/connector/secret/put" - ) + WRITE_CONNECTOR_SECRETS_PATTERN ); public static final NamedClusterPrivilege MONITOR_GLOBAL_RETENTION = new ActionClusterPrivilege( "monitor_data_stream_global_retention", @@ -391,6 +414,7 @@ public class ClusterPrivilegeResolver { NONE, ALL, MONITOR, + MONITOR_CONNECTOR, MONITOR_INFERENCE, MONITOR_ML, MONITOR_TEXT_STRUCTURE, @@ -400,6 +424,7 @@ public class ClusterPrivilegeResolver { MONITOR_ROLLUP, MONITOR_ENRICH, MANAGE, + MANAGE_CONNECTOR, MANAGE_INFERENCE, MANAGE_ML, MANAGE_TRANSFORM_DEPRECATED, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 41da995797e29..a0fe3d09eccc7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -73,6 +73,8 @@ static RoleDescriptor kibanaSystem(String name) { // For Fleet package upgrade "manage_pipeline", "manage_ilm", + // For connectors telemetry + "monitor_connector", // For the endpoint package that ships a transform "manage_transform", InvalidateApiKeyAction.NAME, diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml index bb784f52884f6..ef8fab9ca7b6d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml @@ -15,5 +15,5 @@ setup: # This is fragile - it needs to be updated every time we add a new cluster/index privilege # I would much prefer we could just check that specific entries are in the array, but we don't have # an assertion for that - - length: { "cluster" : 59 } + - length: { "cluster" : 61 } - length: { "index" : 22 } From c89ee3b648dcf206057a2529da627b2084e6bff0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Mon, 1 Jul 2024 19:52:24 +0200 Subject: [PATCH 093/216] ESQL: Renamed TopList to Top (#110347) Rename TopList aggregation to Top, after internal discussions --- docs/changelog/110347.yaml | 5 + .../functions/aggregation-functions.asciidoc | 4 +- .../{top_list.asciidoc => top.asciidoc} | 0 .../{top_list.asciidoc => top.asciidoc} | 4 +- .../definition/{top_list.json => top.json} | 4 +- .../kibana/docs/{top_list.md => top.md} | 
4 +- .../esql/functions/layout/top.asciidoc | 15 ++ .../esql/functions/layout/top_list.asciidoc | 15 -- .../{top_list.asciidoc => top.asciidoc} | 0 .../esql/functions/signature/top.svg | 1 + .../esql/functions/signature/top_list.svg | 1 - .../types/{top_list.asciidoc => top.asciidoc} | 0 x-pack/plugin/esql/compute/build.gradle | 18 +- ...gregator.java => TopDoubleAggregator.java} | 4 +- ...ggregator.java => TopFloatAggregator.java} | 4 +- ...tAggregator.java => TopIntAggregator.java} | 4 +- ...Aggregator.java => TopLongAggregator.java} | 4 +- ....java => TopDoubleAggregatorFunction.java} | 24 +-- ... TopDoubleAggregatorFunctionSupplier.java} | 18 +- ... TopDoubleGroupingAggregatorFunction.java} | 34 ++-- ...n.java => TopFloatAggregatorFunction.java} | 24 +-- ...> TopFloatAggregatorFunctionSupplier.java} | 18 +- ...> TopFloatGroupingAggregatorFunction.java} | 34 ++-- ...ion.java => TopIntAggregatorFunction.java} | 24 +-- ... => TopIntAggregatorFunctionSupplier.java} | 18 +- ... => TopIntGroupingAggregatorFunction.java} | 34 ++-- ...on.java => TopLongAggregatorFunction.java} | 24 +-- ...=> TopLongAggregatorFunctionSupplier.java} | 18 +- ...=> TopLongGroupingAggregatorFunction.java} | 34 ++-- ...egator.java.st => X-TopAggregator.java.st} | 4 +- ... => TopDoubleAggregatorFunctionTests.java} | 6 +- ...a => TopFloatAggregatorFunctionTests.java} | 6 +- ...ava => TopIntAggregatorFunctionTests.java} | 6 +- ...va => TopLongAggregatorFunctionTests.java} | 6 +- .../src/main/resources/meta.csv-spec | 8 +- .../src/main/resources/stats_top.csv-spec | 156 ++++++++++++++++++ .../main/resources/stats_top_list.csv-spec | 156 ------------------ .../xpack/esql/action/EsqlCapabilities.java | 4 +- .../function/EsqlFunctionRegistry.java | 4 +- .../function/aggregate/AggregateFunction.java | 2 +- .../aggregate/{TopList.java => Top.java} | 30 ++-- .../function/aggregate/package-info.java | 4 +- .../xpack/esql/planner/AggregateMapper.java | 6 +- ...nTests.java => TopSerializationTests.java} | 10 +- .../{TopListTests.java => TopTests.java} | 30 ++-- 45 files changed, 417 insertions(+), 412 deletions(-) create mode 100644 docs/changelog/110347.yaml rename docs/reference/esql/functions/description/{top_list.asciidoc => top.asciidoc} (100%) rename docs/reference/esql/functions/examples/{top_list.asciidoc => top.asciidoc} (64%) rename docs/reference/esql/functions/kibana/definition/{top_list.json => top.json} (95%) rename docs/reference/esql/functions/kibana/docs/{top_list.md => top.md} (69%) create mode 100644 docs/reference/esql/functions/layout/top.asciidoc delete mode 100644 docs/reference/esql/functions/layout/top_list.asciidoc rename docs/reference/esql/functions/parameters/{top_list.asciidoc => top.asciidoc} (100%) create mode 100644 docs/reference/esql/functions/signature/top.svg delete mode 100644 docs/reference/esql/functions/signature/top_list.svg rename docs/reference/esql/functions/types/{top_list.asciidoc => top.asciidoc} (100%) rename x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/{TopListDoubleAggregator.java => TopDoubleAggregator.java} (97%) rename x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/{TopListFloatAggregator.java => TopFloatAggregator.java} (97%) rename x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/{TopListIntAggregator.java => TopIntAggregator.java} (97%) rename 
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/{TopListLongAggregator.java => TopLongAggregator.java} (97%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListDoubleAggregatorFunction.java => TopDoubleAggregatorFunction.java} (75%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListDoubleAggregatorFunctionSupplier.java => TopDoubleAggregatorFunctionSupplier.java} (56%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListDoubleGroupingAggregatorFunction.java => TopDoubleGroupingAggregatorFunction.java} (80%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListFloatAggregatorFunction.java => TopFloatAggregatorFunction.java} (75%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListFloatAggregatorFunctionSupplier.java => TopFloatAggregatorFunctionSupplier.java} (56%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListFloatGroupingAggregatorFunction.java => TopFloatGroupingAggregatorFunction.java} (80%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListIntAggregatorFunction.java => TopIntAggregatorFunction.java} (76%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListIntAggregatorFunctionSupplier.java => TopIntAggregatorFunctionSupplier.java} (57%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListIntGroupingAggregatorFunction.java => TopIntGroupingAggregatorFunction.java} (81%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListLongAggregatorFunction.java => TopLongAggregatorFunction.java} (75%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListLongAggregatorFunctionSupplier.java => TopLongAggregatorFunctionSupplier.java} (57%) rename x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/{TopListLongGroupingAggregatorFunction.java => TopLongGroupingAggregatorFunction.java} (81%) rename x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/{X-TopListAggregator.java.st => X-TopAggregator.java.st} (97%) rename x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/{TopListDoubleAggregatorFunctionTests.java => TopDoubleAggregatorFunctionTests.java} (87%) rename x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/{TopListFloatAggregatorFunctionTests.java => TopFloatAggregatorFunctionTests.java} (87%) rename x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/{TopListIntAggregatorFunctionTests.java => TopIntAggregatorFunctionTests.java} (87%) rename x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/{TopListLongAggregatorFunctionTests.java => TopLongAggregatorFunctionTests.java} (87%) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top_list.csv-spec rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/{TopList.java => Top.java} (84%) rename 
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/{TopListSerializationTests.java => TopSerializationTests.java} (82%) rename x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/{TopListTests.java => TopTests.java} (88%) diff --git a/docs/changelog/110347.yaml b/docs/changelog/110347.yaml new file mode 100644 index 0000000000000..8727128230935 --- /dev/null +++ b/docs/changelog/110347.yaml @@ -0,0 +1,5 @@ +pr: 110347 +summary: "ESQL: Renamed `TopList` to Top" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index cf3512449e26f..b23416c0e20bf 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -18,7 +18,7 @@ The <> command supports these aggregate functions: * <> * experimental:[] <> * <> -* <> +* <> * <> // end::agg_list[] @@ -32,5 +32,5 @@ include::min.asciidoc[] include::percentile.asciidoc[] include::st_centroid_agg.asciidoc[] include::sum.asciidoc[] -include::layout/top_list.asciidoc[] +include::layout/top.asciidoc[] include::values.asciidoc[] diff --git a/docs/reference/esql/functions/description/top_list.asciidoc b/docs/reference/esql/functions/description/top.asciidoc similarity index 100% rename from docs/reference/esql/functions/description/top_list.asciidoc rename to docs/reference/esql/functions/description/top.asciidoc diff --git a/docs/reference/esql/functions/examples/top_list.asciidoc b/docs/reference/esql/functions/examples/top.asciidoc similarity index 64% rename from docs/reference/esql/functions/examples/top_list.asciidoc rename to docs/reference/esql/functions/examples/top.asciidoc index 09d32bc9f601a..3d48d3c346c9e 100644 --- a/docs/reference/esql/functions/examples/top_list.asciidoc +++ b/docs/reference/esql/functions/examples/top.asciidoc @@ -4,10 +4,10 @@ [source.merge.styled,esql] ---- -include::{esql-specs}/stats_top_list.csv-spec[tag=top-list] +include::{esql-specs}/stats_top.csv-spec[tag=top] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/stats_top_list.csv-spec[tag=top-list-result] +include::{esql-specs}/stats_top.csv-spec[tag=top-result] |=== diff --git a/docs/reference/esql/functions/kibana/definition/top_list.json b/docs/reference/esql/functions/kibana/definition/top.json similarity index 95% rename from docs/reference/esql/functions/kibana/definition/top_list.json rename to docs/reference/esql/functions/kibana/definition/top.json index 99518a40680ee..7ad073d6e7564 100644 --- a/docs/reference/esql/functions/kibana/definition/top_list.json +++ b/docs/reference/esql/functions/kibana/definition/top.json @@ -1,7 +1,7 @@ { "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "agg", - "name" : "top_list", + "name" : "top", "description" : "Collects the top values for a field. 
Includes repeated values.", "signatures" : [ { @@ -102,6 +102,6 @@ } ], "examples" : [ - "FROM employees\n| STATS top_salaries = TOP_LIST(salary, 3, \"desc\"), top_salary = MAX(salary)" + "FROM employees\n| STATS top_salaries = TOP(salary, 3, \"desc\"), top_salary = MAX(salary)" ] } diff --git a/docs/reference/esql/functions/kibana/docs/top_list.md b/docs/reference/esql/functions/kibana/docs/top.md similarity index 69% rename from docs/reference/esql/functions/kibana/docs/top_list.md rename to docs/reference/esql/functions/kibana/docs/top.md index f7acdf3162b38..10db4e7ac5b55 100644 --- a/docs/reference/esql/functions/kibana/docs/top_list.md +++ b/docs/reference/esql/functions/kibana/docs/top.md @@ -2,10 +2,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. --> -### TOP_LIST +### TOP Collects the top values for a field. Includes repeated values. ``` FROM employees -| STATS top_salaries = TOP_LIST(salary, 3, "desc"), top_salary = MAX(salary) +| STATS top_salaries = TOP(salary, 3, "desc"), top_salary = MAX(salary) ``` diff --git a/docs/reference/esql/functions/layout/top.asciidoc b/docs/reference/esql/functions/layout/top.asciidoc new file mode 100644 index 0000000000000..a29a7c96a3697 --- /dev/null +++ b/docs/reference/esql/functions/layout/top.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-top]] +=== `TOP` + +*Syntax* + +[.text-center] +image::esql/functions/signature/top.svg[Embedded,opts=inline] + +include::../parameters/top.asciidoc[] +include::../description/top.asciidoc[] +include::../types/top.asciidoc[] +include::../examples/top.asciidoc[] diff --git a/docs/reference/esql/functions/layout/top_list.asciidoc b/docs/reference/esql/functions/layout/top_list.asciidoc deleted file mode 100644 index 4735395ca0c0d..0000000000000 --- a/docs/reference/esql/functions/layout/top_list.asciidoc +++ /dev/null @@ -1,15 +0,0 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
- -[discrete] -[[esql-top_list]] -=== `TOP_LIST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/top_list.svg[Embedded,opts=inline] - -include::../parameters/top_list.asciidoc[] -include::../description/top_list.asciidoc[] -include::../types/top_list.asciidoc[] -include::../examples/top_list.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/top_list.asciidoc b/docs/reference/esql/functions/parameters/top.asciidoc similarity index 100% rename from docs/reference/esql/functions/parameters/top_list.asciidoc rename to docs/reference/esql/functions/parameters/top.asciidoc diff --git a/docs/reference/esql/functions/signature/top.svg b/docs/reference/esql/functions/signature/top.svg new file mode 100644 index 0000000000000..cfd15e0d94ac4 --- /dev/null +++ b/docs/reference/esql/functions/signature/top.svg @@ -0,0 +1 @@ +TOP(field,limit,order) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/top_list.svg b/docs/reference/esql/functions/signature/top_list.svg deleted file mode 100644 index e7a5c7a292d41..0000000000000 --- a/docs/reference/esql/functions/signature/top_list.svg +++ /dev/null @@ -1 +0,0 @@ -TOP_LIST(field,limit,order) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/top_list.asciidoc b/docs/reference/esql/functions/types/top.asciidoc similarity index 100% rename from docs/reference/esql/functions/types/top_list.asciidoc rename to docs/reference/esql/functions/types/top.asciidoc diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 852d46a43c24f..3e61b9bc5e51c 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -523,26 +523,26 @@ tasks.named('stringTemplates').configure { } - File topListAggregatorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/aggregation/X-TopListAggregator.java.st") + File topAggregatorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st") template { it.properties = intProperties - it.inputFile = topListAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/TopListIntAggregator.java" + it.inputFile = topAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/TopIntAggregator.java" } template { it.properties = longProperties - it.inputFile = topListAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/TopListLongAggregator.java" + it.inputFile = topAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/TopLongAggregator.java" } template { it.properties = floatProperties - it.inputFile = topListAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/TopListFloatAggregator.java" + it.inputFile = topAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/TopFloatAggregator.java" } template { it.properties = doubleProperties - it.inputFile = topListAggregatorInputFile - it.outputFile = "org/elasticsearch/compute/aggregation/TopListDoubleAggregator.java" + it.inputFile = topAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/TopDoubleAggregator.java" } File multivalueDedupeInputFile = file("src/main/java/org/elasticsearch/compute/operator/mvdedupe/X-MultivalueDedupe.java.st") diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListDoubleAggregator.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java similarity index 97% rename from x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListDoubleAggregator.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java index 941722b4424d3..3bd76b79d62f2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopDoubleAggregator.java @@ -24,9 +24,9 @@ /** * Aggregates the top N field values for double. */ -@Aggregator({ @IntermediateState(name = "topList", type = "DOUBLE_BLOCK") }) +@Aggregator({ @IntermediateState(name = "top", type = "DOUBLE_BLOCK") }) @GroupingAggregator -class TopListDoubleAggregator { +class TopDoubleAggregator { public static SingleState initSingle(BigArrays bigArrays, int limit, boolean ascending) { return new SingleState(bigArrays, limit, ascending); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListFloatAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java similarity index 97% rename from x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListFloatAggregator.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java index c5fc51d5ba13f..066c82e9448fb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListFloatAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopFloatAggregator.java @@ -24,9 +24,9 @@ /** * Aggregates the top N field values for float. */ -@Aggregator({ @IntermediateState(name = "topList", type = "FLOAT_BLOCK") }) +@Aggregator({ @IntermediateState(name = "top", type = "FLOAT_BLOCK") }) @GroupingAggregator -class TopListFloatAggregator { +class TopFloatAggregator { public static SingleState initSingle(BigArrays bigArrays, int limit, boolean ascending) { return new SingleState(bigArrays, limit, ascending); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java similarity index 97% rename from x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListIntAggregator.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java index dafbf1c2a3051..2f5149c594d94 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopIntAggregator.java @@ -24,9 +24,9 @@ /** * Aggregates the top N field values for int. 
*/ -@Aggregator({ @IntermediateState(name = "topList", type = "INT_BLOCK") }) +@Aggregator({ @IntermediateState(name = "top", type = "INT_BLOCK") }) @GroupingAggregator -class TopListIntAggregator { +class TopIntAggregator { public static SingleState initSingle(BigArrays bigArrays, int limit, boolean ascending) { return new SingleState(bigArrays, limit, ascending); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java similarity index 97% rename from x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListLongAggregator.java rename to x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java index c0e7122a4be0b..d6bafaa30c425 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopListLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/TopLongAggregator.java @@ -24,9 +24,9 @@ /** * Aggregates the top N field values for long. */ -@Aggregator({ @IntermediateState(name = "topList", type = "LONG_BLOCK") }) +@Aggregator({ @IntermediateState(name = "top", type = "LONG_BLOCK") }) @GroupingAggregator -class TopListLongAggregator { +class TopLongAggregator { public static SingleState initSingle(BigArrays bigArrays, int limit, boolean ascending) { return new SingleState(bigArrays, limit, ascending); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java similarity index 75% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java index d52d25941780c..3d658294c154f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java @@ -17,16 +17,16 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunction} implementation for {@link TopListDoubleAggregator}. + * {@link AggregatorFunction} implementation for {@link TopDoubleAggregator}. * This class is generated. Do not edit it. 
*/ -public final class TopListDoubleAggregatorFunction implements AggregatorFunction { +public final class TopDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.DOUBLE) ); private final DriverContext driverContext; - private final TopListDoubleAggregator.SingleState state; + private final TopDoubleAggregator.SingleState state; private final List channels; @@ -34,8 +34,8 @@ public final class TopListDoubleAggregatorFunction implements AggregatorFunction private final boolean ascending; - public TopListDoubleAggregatorFunction(DriverContext driverContext, List channels, - TopListDoubleAggregator.SingleState state, int limit, boolean ascending) { + public TopDoubleAggregatorFunction(DriverContext driverContext, List channels, + TopDoubleAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -43,9 +43,9 @@ public TopListDoubleAggregatorFunction(DriverContext driverContext, List channels, int limit, boolean ascending) { - return new TopListDoubleAggregatorFunction(driverContext, channels, TopListDoubleAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); + public static TopDoubleAggregatorFunction create(DriverContext driverContext, + List channels, int limit, boolean ascending) { + return new TopDoubleAggregatorFunction(driverContext, channels, TopDoubleAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } public static List intermediateStateDesc() { @@ -70,7 +70,7 @@ public void addRawInput(Page page) { private void addRawVector(DoubleVector vector) { for (int i = 0; i < vector.getPositionCount(); i++) { - TopListDoubleAggregator.combine(state, vector.getDouble(i)); + TopDoubleAggregator.combine(state, vector.getDouble(i)); } } @@ -82,7 +82,7 @@ private void addRawBlock(DoubleBlock block) { int start = block.getFirstValueIndex(p); int end = start + block.getValueCount(p); for (int i = start; i < end; i++) { - TopListDoubleAggregator.combine(state, block.getDouble(i)); + TopDoubleAggregator.combine(state, block.getDouble(i)); } } } @@ -97,7 +97,7 @@ public void addIntermediateInput(Page page) { } DoubleBlock topList = (DoubleBlock) topListUncast; assert topList.getPositionCount() == 1; - TopListDoubleAggregator.combineIntermediate(state, topList); + TopDoubleAggregator.combineIntermediate(state, topList); } @Override @@ -107,7 +107,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = TopListDoubleAggregator.evaluateFinal(state, driverContext); + blocks[offset] = TopDoubleAggregator.evaluateFinal(state, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java similarity index 56% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunctionSupplier.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java index 48df091d339b6..b781af87ddc82 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java @@ -11,35 +11,35 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunctionSupplier} implementation for {@link TopListDoubleAggregator}. + * {@link AggregatorFunctionSupplier} implementation for {@link TopDoubleAggregator}. * This class is generated. Do not edit it. */ -public final class TopListDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { +public final class TopDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; private final int limit; private final boolean ascending; - public TopListDoubleAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public TopDoubleAggregatorFunctionSupplier(List channels, int limit, + boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; } @Override - public TopListDoubleAggregatorFunction aggregator(DriverContext driverContext) { - return TopListDoubleAggregatorFunction.create(driverContext, channels, limit, ascending); + public TopDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return TopDoubleAggregatorFunction.create(driverContext, channels, limit, ascending); } @Override - public TopListDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return TopListDoubleGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); + public TopDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return TopDoubleGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); } @Override public String describe() { - return "top_list of doubles"; + return "top of doubles"; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java similarity index 80% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleGroupingAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java index 0e3b98bb0f7e5..493e76d23a85f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java @@ -19,14 +19,14 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link GroupingAggregatorFunction} implementation for {@link TopListDoubleAggregator}. + * {@link GroupingAggregatorFunction} implementation for {@link TopDoubleAggregator}. * This class is generated. Do not edit it. 
*/ -public final class TopListDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { +public final class TopDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.DOUBLE) ); - private final TopListDoubleAggregator.GroupingState state; + private final TopDoubleAggregator.GroupingState state; private final List channels; @@ -36,9 +36,9 @@ public final class TopListDoubleGroupingAggregatorFunction implements GroupingAg private final boolean ascending; - public TopListDoubleGroupingAggregatorFunction(List channels, - TopListDoubleAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + public TopDoubleGroupingAggregatorFunction(List channels, + TopDoubleAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -46,9 +46,9 @@ public TopListDoubleGroupingAggregatorFunction(List channels, this.ascending = ascending; } - public static TopListDoubleGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { - return new TopListDoubleGroupingAggregatorFunction(channels, TopListDoubleAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); + public static TopDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext, int limit, boolean ascending) { + return new TopDoubleGroupingAggregatorFunction(channels, TopDoubleAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } public static List intermediateStateDesc() { @@ -103,7 +103,7 @@ private void addRawInput(int positionOffset, IntVector groups, DoubleBlock value int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListDoubleAggregator.combine(state, groupId, values.getDouble(v)); + TopDoubleAggregator.combine(state, groupId, values.getDouble(v)); } } } @@ -111,7 +111,7 @@ private void addRawInput(int positionOffset, IntVector groups, DoubleBlock value private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + TopDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } @@ -130,7 +130,7 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListDoubleAggregator.combine(state, groupId, values.getDouble(v)); + TopDoubleAggregator.combine(state, groupId, values.getDouble(v)); } } } @@ -145,7 +145,7 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = Math.toIntExact(groups.getInt(g)); - 
TopListDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + TopDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); } } } @@ -161,7 +161,7 @@ public void addIntermediateInput(int positionOffset, IntVector groups, Page page DoubleBlock topList = (DoubleBlock) topListUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListDoubleAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopDoubleAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); } } @@ -170,9 +170,9 @@ public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction inpu if (input.getClass() != getClass()) { throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); } - TopListDoubleAggregator.GroupingState inState = ((TopListDoubleGroupingAggregatorFunction) input).state; + TopDoubleAggregator.GroupingState inState = ((TopDoubleGroupingAggregatorFunction) input).state; state.enableGroupIdTracking(new SeenGroupIds.Empty()); - TopListDoubleAggregator.combineStates(state, groupId, inState, position); + TopDoubleAggregator.combineStates(state, groupId, inState, position); } @Override @@ -183,7 +183,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) @Override public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { - blocks[offset] = TopListDoubleAggregator.evaluateFinal(state, selected, driverContext); + blocks[offset] = TopDoubleAggregator.evaluateFinal(state, selected, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java similarity index 75% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java index 6232d6ff21fc9..674b534667863 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java @@ -17,16 +17,16 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunction} implementation for {@link TopListFloatAggregator}. + * {@link AggregatorFunction} implementation for {@link TopFloatAggregator}. * This class is generated. Do not edit it. 
*/ -public final class TopListFloatAggregatorFunction implements AggregatorFunction { +public final class TopFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.FLOAT) ); private final DriverContext driverContext; - private final TopListFloatAggregator.SingleState state; + private final TopFloatAggregator.SingleState state; private final List channels; @@ -34,8 +34,8 @@ public final class TopListFloatAggregatorFunction implements AggregatorFunction private final boolean ascending; - public TopListFloatAggregatorFunction(DriverContext driverContext, List channels, - TopListFloatAggregator.SingleState state, int limit, boolean ascending) { + public TopFloatAggregatorFunction(DriverContext driverContext, List channels, + TopFloatAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -43,9 +43,9 @@ public TopListFloatAggregatorFunction(DriverContext driverContext, List this.ascending = ascending; } - public static TopListFloatAggregatorFunction create(DriverContext driverContext, - List channels, int limit, boolean ascending) { - return new TopListFloatAggregatorFunction(driverContext, channels, TopListFloatAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); + public static TopFloatAggregatorFunction create(DriverContext driverContext, + List channels, int limit, boolean ascending) { + return new TopFloatAggregatorFunction(driverContext, channels, TopFloatAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } public static List intermediateStateDesc() { @@ -70,7 +70,7 @@ public void addRawInput(Page page) { private void addRawVector(FloatVector vector) { for (int i = 0; i < vector.getPositionCount(); i++) { - TopListFloatAggregator.combine(state, vector.getFloat(i)); + TopFloatAggregator.combine(state, vector.getFloat(i)); } } @@ -82,7 +82,7 @@ private void addRawBlock(FloatBlock block) { int start = block.getFirstValueIndex(p); int end = start + block.getValueCount(p); for (int i = start; i < end; i++) { - TopListFloatAggregator.combine(state, block.getFloat(i)); + TopFloatAggregator.combine(state, block.getFloat(i)); } } } @@ -97,7 +97,7 @@ public void addIntermediateInput(Page page) { } FloatBlock topList = (FloatBlock) topListUncast; assert topList.getPositionCount() == 1; - TopListFloatAggregator.combineIntermediate(state, topList); + TopFloatAggregator.combineIntermediate(state, topList); } @Override @@ -107,7 +107,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = TopListFloatAggregator.evaluateFinal(state, driverContext); + blocks[offset] = TopFloatAggregator.evaluateFinal(state, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java similarity index 56% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunctionSupplier.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java index 
ff1c3e8df4b46..f40bbce1d73f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java @@ -11,35 +11,35 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunctionSupplier} implementation for {@link TopListFloatAggregator}. + * {@link AggregatorFunctionSupplier} implementation for {@link TopFloatAggregator}. * This class is generated. Do not edit it. */ -public final class TopListFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { +public final class TopFloatAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; private final int limit; private final boolean ascending; - public TopListFloatAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public TopFloatAggregatorFunctionSupplier(List channels, int limit, + boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; } @Override - public TopListFloatAggregatorFunction aggregator(DriverContext driverContext) { - return TopListFloatAggregatorFunction.create(driverContext, channels, limit, ascending); + public TopFloatAggregatorFunction aggregator(DriverContext driverContext) { + return TopFloatAggregatorFunction.create(driverContext, channels, limit, ascending); } @Override - public TopListFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return TopListFloatGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); + public TopFloatGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return TopFloatGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); } @Override public String describe() { - return "top_list of floats"; + return "top of floats"; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java similarity index 80% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatGroupingAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java index 66f8fa7eeb35d..2555c0aeafec5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java @@ -19,14 +19,14 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link GroupingAggregatorFunction} implementation for {@link TopListFloatAggregator}. + * {@link GroupingAggregatorFunction} implementation for {@link TopFloatAggregator}. * This class is generated. Do not edit it. 
*/ -public final class TopListFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { +public final class TopFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.FLOAT) ); - private final TopListFloatAggregator.GroupingState state; + private final TopFloatAggregator.GroupingState state; private final List channels; @@ -36,9 +36,9 @@ public final class TopListFloatGroupingAggregatorFunction implements GroupingAgg private final boolean ascending; - public TopListFloatGroupingAggregatorFunction(List channels, - TopListFloatAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + public TopFloatGroupingAggregatorFunction(List channels, + TopFloatAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -46,9 +46,9 @@ public TopListFloatGroupingAggregatorFunction(List channels, this.ascending = ascending; } - public static TopListFloatGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { - return new TopListFloatGroupingAggregatorFunction(channels, TopListFloatAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); + public static TopFloatGroupingAggregatorFunction create(List channels, + DriverContext driverContext, int limit, boolean ascending) { + return new TopFloatGroupingAggregatorFunction(channels, TopFloatAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } public static List intermediateStateDesc() { @@ -103,7 +103,7 @@ private void addRawInput(int positionOffset, IntVector groups, FloatBlock values int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListFloatAggregator.combine(state, groupId, values.getFloat(v)); + TopFloatAggregator.combine(state, groupId, values.getFloat(v)); } } } @@ -111,7 +111,7 @@ private void addRawInput(int positionOffset, IntVector groups, FloatBlock values private void addRawInput(int positionOffset, IntVector groups, FloatVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListFloatAggregator.combine(state, groupId, values.getFloat(groupPosition + positionOffset)); + TopFloatAggregator.combine(state, groupId, values.getFloat(groupPosition + positionOffset)); } } @@ -130,7 +130,7 @@ private void addRawInput(int positionOffset, IntBlock groups, FloatBlock values) int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListFloatAggregator.combine(state, groupId, values.getFloat(v)); + TopFloatAggregator.combine(state, groupId, values.getFloat(v)); } } } @@ -145,7 +145,7 @@ private void addRawInput(int positionOffset, IntBlock groups, FloatVector values int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = Math.toIntExact(groups.getInt(g)); - TopListFloatAggregator.combine(state, groupId, 
values.getFloat(groupPosition + positionOffset)); + TopFloatAggregator.combine(state, groupId, values.getFloat(groupPosition + positionOffset)); } } } @@ -161,7 +161,7 @@ public void addIntermediateInput(int positionOffset, IntVector groups, Page page FloatBlock topList = (FloatBlock) topListUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListFloatAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopFloatAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); } } @@ -170,9 +170,9 @@ public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction inpu if (input.getClass() != getClass()) { throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); } - TopListFloatAggregator.GroupingState inState = ((TopListFloatGroupingAggregatorFunction) input).state; + TopFloatAggregator.GroupingState inState = ((TopFloatGroupingAggregatorFunction) input).state; state.enableGroupIdTracking(new SeenGroupIds.Empty()); - TopListFloatAggregator.combineStates(state, groupId, inState, position); + TopFloatAggregator.combineStates(state, groupId, inState, position); } @Override @@ -183,7 +183,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) @Override public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { - blocks[offset] = TopListFloatAggregator.evaluateFinal(state, selected, driverContext); + blocks[offset] = TopFloatAggregator.evaluateFinal(state, selected, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java similarity index 76% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java index e885b285c4a51..94163e4915944 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java @@ -17,16 +17,16 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunction} implementation for {@link TopListIntAggregator}. + * {@link AggregatorFunction} implementation for {@link TopIntAggregator}. * This class is generated. Do not edit it. 
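 * <p>
 *     Editor's sketch, not part of the generated source: the non-grouping variant is obtained
 *     through {@code create} and consumes whole pages. The {@code driverContext} and the input
 *     channel here are assumptions for illustration.
 * <pre>{@code
 * // limit = 3, ascending = false: keep the three largest int values seen on channel 0
 * TopIntAggregatorFunction fn = TopIntAggregatorFunction.create(driverContext, List.of(0), 3, false);
 * }</pre>
 * </p>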
*/ -public final class TopListIntAggregatorFunction implements AggregatorFunction { +public final class TopIntAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.INT) ); private final DriverContext driverContext; - private final TopListIntAggregator.SingleState state; + private final TopIntAggregator.SingleState state; private final List channels; @@ -34,8 +34,8 @@ public final class TopListIntAggregatorFunction implements AggregatorFunction { private final boolean ascending; - public TopListIntAggregatorFunction(DriverContext driverContext, List channels, - TopListIntAggregator.SingleState state, int limit, boolean ascending) { + public TopIntAggregatorFunction(DriverContext driverContext, List channels, + TopIntAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -43,9 +43,9 @@ public TopListIntAggregatorFunction(DriverContext driverContext, List c this.ascending = ascending; } - public static TopListIntAggregatorFunction create(DriverContext driverContext, - List channels, int limit, boolean ascending) { - return new TopListIntAggregatorFunction(driverContext, channels, TopListIntAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); + public static TopIntAggregatorFunction create(DriverContext driverContext, + List channels, int limit, boolean ascending) { + return new TopIntAggregatorFunction(driverContext, channels, TopIntAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } public static List intermediateStateDesc() { @@ -70,7 +70,7 @@ public void addRawInput(Page page) { private void addRawVector(IntVector vector) { for (int i = 0; i < vector.getPositionCount(); i++) { - TopListIntAggregator.combine(state, vector.getInt(i)); + TopIntAggregator.combine(state, vector.getInt(i)); } } @@ -82,7 +82,7 @@ private void addRawBlock(IntBlock block) { int start = block.getFirstValueIndex(p); int end = start + block.getValueCount(p); for (int i = start; i < end; i++) { - TopListIntAggregator.combine(state, block.getInt(i)); + TopIntAggregator.combine(state, block.getInt(i)); } } } @@ -97,7 +97,7 @@ public void addIntermediateInput(Page page) { } IntBlock topList = (IntBlock) topListUncast; assert topList.getPositionCount() == 1; - TopListIntAggregator.combineIntermediate(state, topList); + TopIntAggregator.combineIntermediate(state, topList); } @Override @@ -107,7 +107,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = TopListIntAggregator.evaluateFinal(state, driverContext); + blocks[offset] = TopIntAggregator.evaluateFinal(state, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java similarity index 57% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunctionSupplier.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java index d8bf91ba85541..df6502350c06c 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java @@ -11,35 +11,35 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunctionSupplier} implementation for {@link TopListIntAggregator}. + * {@link AggregatorFunctionSupplier} implementation for {@link TopIntAggregator}. * This class is generated. Do not edit it. */ -public final class TopListIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { +public final class TopIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; private final int limit; private final boolean ascending; - public TopListIntAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public TopIntAggregatorFunctionSupplier(List channels, int limit, + boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; } @Override - public TopListIntAggregatorFunction aggregator(DriverContext driverContext) { - return TopListIntAggregatorFunction.create(driverContext, channels, limit, ascending); + public TopIntAggregatorFunction aggregator(DriverContext driverContext) { + return TopIntAggregatorFunction.create(driverContext, channels, limit, ascending); } @Override - public TopListIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return TopListIntGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); + public TopIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return TopIntGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); } @Override public String describe() { - return "top_list of ints"; + return "top of ints"; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java similarity index 81% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntGroupingAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java index 820ebb95e530c..dbbc5ea6df650 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java @@ -17,14 +17,14 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link GroupingAggregatorFunction} implementation for {@link TopListIntAggregator}. + * {@link GroupingAggregatorFunction} implementation for {@link TopIntAggregator}. * This class is generated. Do not edit it. 
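 * <p>
 *     Editor's note: {@code describe()} is the string that surfaces in plans and profile output,
 *     so the rename is user-visible there as well; this supplier now reports
 *     {@code "top of ints"} where it previously reported {@code "top_list of ints"}.
 * </p>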
*/ -public final class TopListIntGroupingAggregatorFunction implements GroupingAggregatorFunction { +public final class TopIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.INT) ); - private final TopListIntAggregator.GroupingState state; + private final TopIntAggregator.GroupingState state; private final List channels; @@ -34,9 +34,9 @@ public final class TopListIntGroupingAggregatorFunction implements GroupingAggre private final boolean ascending; - public TopListIntGroupingAggregatorFunction(List channels, - TopListIntAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + public TopIntGroupingAggregatorFunction(List channels, + TopIntAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -44,9 +44,9 @@ public TopListIntGroupingAggregatorFunction(List channels, this.ascending = ascending; } - public static TopListIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { - return new TopListIntGroupingAggregatorFunction(channels, TopListIntAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); + public static TopIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext, int limit, boolean ascending) { + return new TopIntGroupingAggregatorFunction(channels, TopIntAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } public static List intermediateStateDesc() { @@ -101,7 +101,7 @@ private void addRawInput(int positionOffset, IntVector groups, IntBlock values) int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListIntAggregator.combine(state, groupId, values.getInt(v)); + TopIntAggregator.combine(state, groupId, values.getInt(v)); } } } @@ -109,7 +109,7 @@ private void addRawInput(int positionOffset, IntVector groups, IntBlock values) private void addRawInput(int positionOffset, IntVector groups, IntVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + TopIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } @@ -128,7 +128,7 @@ private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListIntAggregator.combine(state, groupId, values.getInt(v)); + TopIntAggregator.combine(state, groupId, values.getInt(v)); } } } @@ -143,7 +143,7 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = Math.toIntExact(groups.getInt(g)); - TopListIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + 
TopIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); } } } @@ -159,7 +159,7 @@ public void addIntermediateInput(int positionOffset, IntVector groups, Page page IntBlock topList = (IntBlock) topListUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListIntAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopIntAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); } } @@ -168,9 +168,9 @@ public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction inpu if (input.getClass() != getClass()) { throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); } - TopListIntAggregator.GroupingState inState = ((TopListIntGroupingAggregatorFunction) input).state; + TopIntAggregator.GroupingState inState = ((TopIntGroupingAggregatorFunction) input).state; state.enableGroupIdTracking(new SeenGroupIds.Empty()); - TopListIntAggregator.combineStates(state, groupId, inState, position); + TopIntAggregator.combineStates(state, groupId, inState, position); } @Override @@ -181,7 +181,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) @Override public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { - blocks[offset] = TopListIntAggregator.evaluateFinal(state, selected, driverContext); + blocks[offset] = TopIntAggregator.evaluateFinal(state, selected, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java similarity index 75% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java index 1a09a1a860e2f..1887e958344ee 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java @@ -17,16 +17,16 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunction} implementation for {@link TopListLongAggregator}. + * {@link AggregatorFunction} implementation for {@link TopLongAggregator}. * This class is generated. Do not edit it. 
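 * <p>
 *     Editor's illustration with hypothetical inputs: with {@code limit = 2} and
 *     {@code ascending = true}, feeding the raw long values 5, -3, 7, -3 leaves the state
 *     holding [-3, -3]; repeated values are kept, matching the {@code topDuplicates}
 *     csv-spec case later in this patch.
 * </p>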
*/ -public final class TopListLongAggregatorFunction implements AggregatorFunction { +public final class TopLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.LONG) ); private final DriverContext driverContext; - private final TopListLongAggregator.SingleState state; + private final TopLongAggregator.SingleState state; private final List channels; @@ -34,8 +34,8 @@ public final class TopListLongAggregatorFunction implements AggregatorFunction { private final boolean ascending; - public TopListLongAggregatorFunction(DriverContext driverContext, List channels, - TopListLongAggregator.SingleState state, int limit, boolean ascending) { + public TopLongAggregatorFunction(DriverContext driverContext, List channels, + TopLongAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -43,9 +43,9 @@ public TopListLongAggregatorFunction(DriverContext driverContext, List this.ascending = ascending; } - public static TopListLongAggregatorFunction create(DriverContext driverContext, - List channels, int limit, boolean ascending) { - return new TopListLongAggregatorFunction(driverContext, channels, TopListLongAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); + public static TopLongAggregatorFunction create(DriverContext driverContext, + List channels, int limit, boolean ascending) { + return new TopLongAggregatorFunction(driverContext, channels, TopLongAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } public static List intermediateStateDesc() { @@ -70,7 +70,7 @@ public void addRawInput(Page page) { private void addRawVector(LongVector vector) { for (int i = 0; i < vector.getPositionCount(); i++) { - TopListLongAggregator.combine(state, vector.getLong(i)); + TopLongAggregator.combine(state, vector.getLong(i)); } } @@ -82,7 +82,7 @@ private void addRawBlock(LongBlock block) { int start = block.getFirstValueIndex(p); int end = start + block.getValueCount(p); for (int i = start; i < end; i++) { - TopListLongAggregator.combine(state, block.getLong(i)); + TopLongAggregator.combine(state, block.getLong(i)); } } } @@ -97,7 +97,7 @@ public void addIntermediateInput(Page page) { } LongBlock topList = (LongBlock) topListUncast; assert topList.getPositionCount() == 1; - TopListLongAggregator.combineIntermediate(state, topList); + TopLongAggregator.combineIntermediate(state, topList); } @Override @@ -107,7 +107,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, DriverContext drive @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { - blocks[offset] = TopListLongAggregator.evaluateFinal(state, driverContext); + blocks[offset] = TopLongAggregator.evaluateFinal(state, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java similarity index 57% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunctionSupplier.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java index 617895fbff1a3..3a41143be46ad 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java @@ -11,35 +11,35 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link AggregatorFunctionSupplier} implementation for {@link TopListLongAggregator}. + * {@link AggregatorFunctionSupplier} implementation for {@link TopLongAggregator}. * This class is generated. Do not edit it. */ -public final class TopListLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { +public final class TopLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { private final List channels; private final int limit; private final boolean ascending; - public TopListLongAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public TopLongAggregatorFunctionSupplier(List channels, int limit, + boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; } @Override - public TopListLongAggregatorFunction aggregator(DriverContext driverContext) { - return TopListLongAggregatorFunction.create(driverContext, channels, limit, ascending); + public TopLongAggregatorFunction aggregator(DriverContext driverContext) { + return TopLongAggregatorFunction.create(driverContext, channels, limit, ascending); } @Override - public TopListLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { - return TopListLongGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); + public TopLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return TopLongGroupingAggregatorFunction.create(channels, driverContext, limit, ascending); } @Override public String describe() { - return "top_list of longs"; + return "top of longs"; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java similarity index 81% rename from x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongGroupingAggregatorFunction.java rename to x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java index cadb48b7d29d4..64564d0c49756 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopListLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java @@ -19,14 +19,14 @@ import org.elasticsearch.compute.operator.DriverContext; /** - * {@link GroupingAggregatorFunction} implementation for {@link TopListLongAggregator}. + * {@link GroupingAggregatorFunction} implementation for {@link TopLongAggregator}. * This class is generated. Do not edit it. 
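 * <p>
 *     Editor's note: the single intermediate block carries each group's partial top-N.
 *     {@code evaluateIntermediate} writes it where the partial aggregation runs, and
 *     {@code addIntermediateInput} merges it back through {@code combineIntermediate} during the
 *     final reduction, so the limit and order only need to agree across the two phases.
 * </p>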
*/ -public final class TopListLongGroupingAggregatorFunction implements GroupingAggregatorFunction { +public final class TopLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("topList", ElementType.LONG) ); - private final TopListLongAggregator.GroupingState state; + private final TopLongAggregator.GroupingState state; private final List channels; @@ -36,9 +36,9 @@ public final class TopListLongGroupingAggregatorFunction implements GroupingAggr private final boolean ascending; - public TopListLongGroupingAggregatorFunction(List channels, - TopListLongAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + public TopLongGroupingAggregatorFunction(List channels, + TopLongAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -46,9 +46,9 @@ public TopListLongGroupingAggregatorFunction(List channels, this.ascending = ascending; } - public static TopListLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { - return new TopListLongGroupingAggregatorFunction(channels, TopListLongAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); + public static TopLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext, int limit, boolean ascending) { + return new TopLongGroupingAggregatorFunction(channels, TopLongAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } public static List intermediateStateDesc() { @@ -103,7 +103,7 @@ private void addRawInput(int positionOffset, IntVector groups, LongBlock values) int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListLongAggregator.combine(state, groupId, values.getLong(v)); + TopLongAggregator.combine(state, groupId, values.getLong(v)); } } } @@ -111,7 +111,7 @@ private void addRawInput(int positionOffset, IntVector groups, LongBlock values) private void addRawInput(int positionOffset, IntVector groups, LongVector values) { for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + TopLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } @@ -130,7 +130,7 @@ private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); for (int v = valuesStart; v < valuesEnd; v++) { - TopListLongAggregator.combine(state, groupId, values.getLong(v)); + TopLongAggregator.combine(state, groupId, values.getLong(v)); } } } @@ -145,7 +145,7 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) int groupEnd = groupStart + groups.getValueCount(groupPosition); for (int g = groupStart; g < groupEnd; g++) { int groupId = Math.toIntExact(groups.getInt(g)); - TopListLongAggregator.combine(state, groupId, values.getLong(groupPosition + 
positionOffset)); + TopLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); } } } @@ -161,7 +161,7 @@ public void addIntermediateInput(int positionOffset, IntVector groups, Page page LongBlock topList = (LongBlock) topListUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopListLongAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopLongAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); } } @@ -170,9 +170,9 @@ public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction inpu if (input.getClass() != getClass()) { throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); } - TopListLongAggregator.GroupingState inState = ((TopListLongGroupingAggregatorFunction) input).state; + TopLongAggregator.GroupingState inState = ((TopLongGroupingAggregatorFunction) input).state; state.enableGroupIdTracking(new SeenGroupIds.Empty()); - TopListLongAggregator.combineStates(state, groupId, inState, position); + TopLongAggregator.combineStates(state, groupId, inState, position); } @Override @@ -183,7 +183,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) @Override public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { - blocks[offset] = TopListLongAggregator.evaluateFinal(state, selected, driverContext); + blocks[offset] = TopLongAggregator.evaluateFinal(state, selected, driverContext); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopListAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st similarity index 97% rename from x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopListAggregator.java.st rename to x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st index 810311154503e..41d0224f37214 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopListAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st @@ -29,9 +29,9 @@ import org.elasticsearch.search.sort.SortOrder; /** * Aggregates the top N field values for $type$. 
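 * <p>
 *     Editor's note: the {@code $type$}/{@code $Type$}/{@code $TYPE$} placeholders are expanded
 *     once per supported numeric type by the StringTemplate step registered in
 *     {@code x-pack/plugin/esql/compute/build.gradle}, so this single template is the source of
 *     the generated {@code TopInt*}, {@code TopLong*}, {@code TopFloat*} and {@code TopDouble*}
 *     classes renamed throughout this patch.
 * </p>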
*/ -@Aggregator({ @IntermediateState(name = "topList", type = "$TYPE$_BLOCK") }) +@Aggregator({ @IntermediateState(name = "top", type = "$TYPE$_BLOCK") }) @GroupingAggregator -class TopList$Type$Aggregator { +class Top$Type$Aggregator { public static SingleState initSingle(BigArrays bigArrays, int limit, boolean ascending) { return new SingleState(bigArrays, limit, ascending); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java similarity index 87% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunctionTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java index f708038776032..817df4ba47130 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionTests.java @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.contains; -public class TopListDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { +public class TopDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { private static final int LIMIT = 100; @Override @@ -28,12 +28,12 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { @Override protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopListDoubleAggregatorFunctionSupplier(inputChannels, LIMIT, true); + return new TopDoubleAggregatorFunctionSupplier(inputChannels, LIMIT, true); } @Override protected String expectedDescriptionOfAggregator() { - return "top_list of doubles"; + return "top of doubles"; } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java similarity index 87% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunctionTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java index 98a016783955e..c565a13fb73d4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionTests.java @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.contains; -public class TopListFloatAggregatorFunctionTests extends AggregatorFunctionTestCase { +public class TopFloatAggregatorFunctionTests extends AggregatorFunctionTestCase { private static final int LIMIT = 100; @Override @@ -28,12 +28,12 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { @Override protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopListFloatAggregatorFunctionSupplier(inputChannels, LIMIT, true); + return new TopFloatAggregatorFunctionSupplier(inputChannels, LIMIT, true); } @Override protected String expectedDescriptionOfAggregator() { - return "top_list of floats"; + return "top of floats"; } @Override diff 
--git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java similarity index 87% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunctionTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java index 443604efd5c15..a0ac1a685413e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionTests.java @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.contains; -public class TopListIntAggregatorFunctionTests extends AggregatorFunctionTestCase { +public class TopIntAggregatorFunctionTests extends AggregatorFunctionTestCase { private static final int LIMIT = 100; @Override @@ -28,12 +28,12 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { @Override protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopListIntAggregatorFunctionSupplier(inputChannels, LIMIT, true); + return new TopIntAggregatorFunctionSupplier(inputChannels, LIMIT, true); } @Override protected String expectedDescriptionOfAggregator() { - return "top_list of ints"; + return "top of ints"; } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java similarity index 87% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunctionTests.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java index 4a6f101e573b8..7ec1fb9c53053 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopListLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.contains; -public class TopListLongAggregatorFunctionTests extends AggregatorFunctionTestCase { +public class TopLongAggregatorFunctionTests extends AggregatorFunctionTestCase { private static final int LIMIT = 100; @Override @@ -28,12 +28,12 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { @Override protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { - return new TopListLongAggregatorFunctionSupplier(inputChannels, LIMIT, true); + return new TopLongAggregatorFunctionSupplier(inputChannels, LIMIT, true); } @Override protected String expectedDescriptionOfAggregator() { - return "top_list of longs"; + return "top of longs"; } @Override diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 5cae87850ae99..cf7b1b99843a2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -110,7 +110,7 @@ double tau() "keyword|text to_upper(str:keyword|text)" "version 
to_ver(field:keyword|text|version)" "version to_version(field:keyword|text|version)" -"double|integer|long|date top_list(field:double|integer|long|date, limit:integer, order:keyword)" +"double|integer|long|date top(field:double|integer|long|date, limit:integer, order:keyword)" "keyword|text trim(string:keyword|text)" "boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" ; @@ -229,7 +229,7 @@ to_unsigned_lo|field |"boolean|date|keyword|text|d to_upper |str |"keyword|text" |String expression. If `null`, the function returns `null`. to_ver |field |"keyword|text|version" |Input value. The input can be a single- or multi-valued column or an expression. to_version |field |"keyword|text|version" |Input value. The input can be a single- or multi-valued column or an expression. -top_list |[field, limit, order] |["double|integer|long|date", integer, keyword] |[The field to collect the top values for.,The maximum number of values to collect.,The order to calculate the top values. Either `asc` or `desc`.] +top |[field, limit, order] |["double|integer|long|date", integer, keyword] |[The field to collect the top values for.,The maximum number of values to collect.,The order to calculate the top values. Either `asc` or `desc`.] trim |string |"keyword|text" |String expression. If `null`, the function returns `null`. values |field |"boolean|date|double|integer|ip|keyword|long|text|version" |[""] ; @@ -349,7 +349,7 @@ to_unsigned_lo|Converts an input value to an unsigned long value. If the input p to_upper |Returns a new string representing the input string converted to upper case. to_ver |Converts an input string to a version value. to_version |Converts an input string to a version value. -top_list |Collects the top values for a field. Includes repeated values. +top |Collects the top values for a field. Includes repeated values. trim |Removes leading and trailing whitespaces from a string. values |Collect values for a field. 
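// Editor's addition, not an original spec row: a usage sketch of the renamed function, matching
// the examples in stats_top.csv-spec, e.g. FROM employees | STATS top_salaries = TOP(salary, 3, "desc")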
; @@ -470,7 +470,7 @@ to_unsigned_lo|unsigned_long to_upper |"keyword|text" |false |false |false to_ver |version |false |false |false to_version |version |false |false |false -top_list |"double|integer|long|date" |[false, false, false] |false |true +top |"double|integer|long|date" |[false, false, false] |false |true trim |"keyword|text" |false |false |false values |"boolean|date|double|integer|ip|keyword|long|text|version" |false |false |true ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec new file mode 100644 index 0000000000000..d03bdb3c3dfd7 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top.csv-spec @@ -0,0 +1,156 @@ +top +required_capability: agg_top +// tag::top[] +FROM employees +| STATS top_salaries = TOP(salary, 3, "desc"), top_salary = MAX(salary) +// end::top[] +; + +// tag::top-result[] +top_salaries:integer | top_salary:integer +[74999, 74970, 74572] | 74999 +// end::top-result[] +; + +topAllTypesAsc +required_capability: agg_top +FROM employees +| STATS + date = TOP(hire_date, 2, "asc"), + double = TOP(salary_change, 2, "asc"), + integer = TOP(salary, 2, "asc"), + long = TOP(salary_change.long, 2, "asc") +; + +date:date | double:double | integer:integer | long:long +[1985-02-18T00:00:00.000Z,1985-02-24T00:00:00.000Z] | [-9.81,-9.28] | [25324,25945] | [-9,-9] +; + +topAllTypesDesc +required_capability: agg_top +FROM employees +| STATS + date = TOP(hire_date, 2, "desc"), + double = TOP(salary_change, 2, "desc"), + integer = TOP(salary, 2, "desc"), + long = TOP(salary_change.long, 2, "desc") +; + +date:date | double:double | integer:integer | long:long +[1999-04-30T00:00:00.000Z,1997-05-19T00:00:00.000Z] | [14.74,14.68] | [74999,74970] | [14,14] +; + +topAllTypesRow +required_capability: agg_top +ROW + constant_date=TO_DATETIME("1985-02-18T00:00:00.000Z"), + constant_double=-9.81, + constant_integer=25324, + constant_long=TO_LONG(-9) +| STATS + date = TOP(constant_date, 2, "asc"), + double = TOP(constant_double, 2, "asc"), + integer = TOP(constant_integer, 2, "asc"), + long = TOP(constant_long, 2, "asc") +| keep date, double, integer, long +; + +date:date | double:double | integer:integer | long:long +1985-02-18T00:00:00.000Z | -9.81 | 25324 | -9 +; + +topSomeBuckets +required_capability: agg_top +FROM employees +| STATS top_salary = TOP(salary, 2, "desc") by still_hired +| sort still_hired asc +; + +top_salary:integer | still_hired:boolean +[74999,74970] | false +[74572,73578] | true +; + +topManyBuckets +required_capability: agg_top +FROM employees +| STATS top_salary = TOP(salary, 2, "desc") by x=emp_no, y=emp_no+1 +| sort x asc +| limit 3 +; + +top_salary:integer | x:integer | y:integer +57305 | 10001 | 10002 +56371 | 10002 | 10003 +61805 | 10003 | 10004 +; + +topMultipleStats +required_capability: agg_top +FROM employees +| STATS top_salary = TOP(salary, 1, "desc") by emp_no +| STATS top_salary = TOP(top_salary, 3, "asc") +; + +top_salary:integer +[25324,25945,25976] +; + +topAllTypesMin +required_capability: agg_top +FROM employees +| STATS + date = TOP(hire_date, 1, "asc"), + double = TOP(salary_change, 1, "asc"), + integer = TOP(salary, 1, "asc"), + long = TOP(salary_change.long, 1, "asc") +; + +date:date | double:double | integer:integer | long:long +1985-02-18T00:00:00.000Z | -9.81 | 25324 | -9 +; + +topAllTypesMax +required_capability: agg_top +FROM employees +| STATS + date = TOP(hire_date, 1, "desc"), + double = 
TOP(salary_change, 1, "desc"), + integer = TOP(salary, 1, "desc"), + long = TOP(salary_change.long, 1, "desc") +; + +date:date | double:double | integer:integer | long:long +1999-04-30T00:00:00.000Z | 14.74 | 74999 | 14 +; + +topAscDesc +required_capability: agg_top +FROM employees +| STATS top_asc = TOP(salary, 3, "asc"), top_desc = TOP(salary, 3, "desc") +; + +top_asc:integer | top_desc:integer +[25324, 25945, 25976] | [74999, 74970, 74572] +; + +topEmpty +required_capability: agg_top +FROM employees +| WHERE salary < 0 +| STATS top = TOP(salary, 3, "asc") +; + +top:integer +null +; + +topDuplicates +required_capability: agg_top +FROM employees +| STATS integer = TOP(languages, 2, "desc") +; + +integer:integer +[5, 5] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top_list.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top_list.csv-spec deleted file mode 100644 index c24f6a7e70954..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_top_list.csv-spec +++ /dev/null @@ -1,156 +0,0 @@ -topList -required_capability: agg_top_list -// tag::top-list[] -FROM employees -| STATS top_salaries = TOP_LIST(salary, 3, "desc"), top_salary = MAX(salary) -// end::top-list[] -; - -// tag::top-list-result[] -top_salaries:integer | top_salary:integer -[74999, 74970, 74572] | 74999 -// end::top-list-result[] -; - -topListAllTypesAsc -required_capability: agg_top_list -FROM employees -| STATS - date = TOP_LIST(hire_date, 2, "asc"), - double = TOP_LIST(salary_change, 2, "asc"), - integer = TOP_LIST(salary, 2, "asc"), - long = TOP_LIST(salary_change.long, 2, "asc") -; - -date:date | double:double | integer:integer | long:long -[1985-02-18T00:00:00.000Z,1985-02-24T00:00:00.000Z] | [-9.81,-9.28] | [25324,25945] | [-9,-9] -; - -topListAllTypesDesc -required_capability: agg_top_list -FROM employees -| STATS - date = TOP_LIST(hire_date, 2, "desc"), - double = TOP_LIST(salary_change, 2, "desc"), - integer = TOP_LIST(salary, 2, "desc"), - long = TOP_LIST(salary_change.long, 2, "desc") -; - -date:date | double:double | integer:integer | long:long -[1999-04-30T00:00:00.000Z,1997-05-19T00:00:00.000Z] | [14.74,14.68] | [74999,74970] | [14,14] -; - -topListAllTypesRow -required_capability: agg_top_list -ROW - constant_date=TO_DATETIME("1985-02-18T00:00:00.000Z"), - constant_double=-9.81, - constant_integer=25324, - constant_long=TO_LONG(-9) -| STATS - date = TOP_LIST(constant_date, 2, "asc"), - double = TOP_LIST(constant_double, 2, "asc"), - integer = TOP_LIST(constant_integer, 2, "asc"), - long = TOP_LIST(constant_long, 2, "asc") -| keep date, double, integer, long -; - -date:date | double:double | integer:integer | long:long -1985-02-18T00:00:00.000Z | -9.81 | 25324 | -9 -; - -topListSomeBuckets -required_capability: agg_top_list -FROM employees -| STATS top_salary = TOP_LIST(salary, 2, "desc") by still_hired -| sort still_hired asc -; - -top_salary:integer | still_hired:boolean -[74999,74970] | false -[74572,73578] | true -; - -topListManyBuckets -required_capability: agg_top_list -FROM employees -| STATS top_salary = TOP_LIST(salary, 2, "desc") by x=emp_no, y=emp_no+1 -| sort x asc -| limit 3 -; - -top_salary:integer | x:integer | y:integer -57305 | 10001 | 10002 -56371 | 10002 | 10003 -61805 | 10003 | 10004 -; - -topListMultipleStats -required_capability: agg_top_list -FROM employees -| STATS top_salary = TOP_LIST(salary, 1, "desc") by emp_no -| STATS top_salary = TOP_LIST(top_salary, 3, "asc") -; - -top_salary:integer -[25324,25945,25976] 
-; - -topListAllTypesMin -required_capability: agg_top_list -FROM employees -| STATS - date = TOP_LIST(hire_date, 1, "asc"), - double = TOP_LIST(salary_change, 1, "asc"), - integer = TOP_LIST(salary, 1, "asc"), - long = TOP_LIST(salary_change.long, 1, "asc") -; - -date:date | double:double | integer:integer | long:long -1985-02-18T00:00:00.000Z | -9.81 | 25324 | -9 -; - -topListAllTypesMax -required_capability: agg_top_list -FROM employees -| STATS - date = TOP_LIST(hire_date, 1, "desc"), - double = TOP_LIST(salary_change, 1, "desc"), - integer = TOP_LIST(salary, 1, "desc"), - long = TOP_LIST(salary_change.long, 1, "desc") -; - -date:date | double:double | integer:integer | long:long -1999-04-30T00:00:00.000Z | 14.74 | 74999 | 14 -; - -topListAscDesc -required_capability: agg_top_list -FROM employees -| STATS top_asc = TOP_LIST(salary, 3, "asc"), top_desc = TOP_LIST(salary, 3, "desc") -; - -top_asc:integer | top_desc:integer -[25324, 25945, 25976] | [74999, 74970, 74572] -; - -topListEmpty -required_capability: agg_top_list -FROM employees -| WHERE salary < 0 -| STATS top = TOP_LIST(salary, 3, "asc") -; - -top:integer -null -; - -topListDuplicates -required_capability: agg_top_list -FROM employees -| STATS integer = TOP_LIST(languages, 2, "desc") -; - -integer:integer -[5, 5] -; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 43b1ae8d66906..71fccf4af0714 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -45,9 +45,9 @@ public enum Cap { FN_SUBSTRING_EMPTY_NULL, /** - * Support for aggregation function {@code TOP_LIST}. + * Support for aggregation function {@code TOP}. */ - AGG_TOP_LIST, + AGG_TOP, /** * Optimization for ST_CENTROID changed some results in cartesian data. 
#108713 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 434623814dcb4..643fd2622a6a8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; -import org.elasticsearch.xpack.esql.expression.function.aggregate.TopList; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; @@ -198,7 +198,7 @@ private FunctionDefinition[][] functions() { def(Min.class, Min::new, "min"), def(Percentile.class, Percentile::new, "percentile"), def(Sum.class, Sum::new, "sum"), - def(TopList.class, TopList::new, "top_list"), + def(Top.class, Top::new, "top"), def(Values.class, Values::new, "values") }, // math new FunctionDefinition[] { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index 0e355e064a788..da44b15bdb69d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -41,7 +41,7 @@ public static List getNamedWriteables() { Percentile.ENTRY, SpatialCentroid.ENTRY, Sum.ENTRY, - TopList.ENTRY, + Top.ENTRY, Values.ENTRY, Rate.ENTRY, // internal functions diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopList.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java similarity index 84% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopList.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index 16cfdad89612b..da7a14e5b28eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopList.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -12,9 +12,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; -import org.elasticsearch.compute.aggregation.TopListDoubleAggregatorFunctionSupplier; -import org.elasticsearch.compute.aggregation.TopListIntAggregatorFunctionSupplier; -import org.elasticsearch.compute.aggregation.TopListLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.TopDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.TopIntAggregatorFunctionSupplier; +import 
org.elasticsearch.compute.aggregation.TopLongAggregatorFunctionSupplier; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -40,8 +40,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; -public class TopList extends AggregateFunction implements ToAggregator, SurrogateExpression { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "TopList", TopList::new); +public class Top extends AggregateFunction implements ToAggregator, SurrogateExpression { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Top", Top::new); private static final String ORDER_ASC = "ASC"; private static final String ORDER_DESC = "DESC"; @@ -50,9 +50,9 @@ public class TopList extends AggregateFunction implements ToAggregator, Surrogat returnType = { "double", "integer", "long", "date" }, description = "Collects the top values for a field. Includes repeated values.", isAggregation = true, - examples = @Example(file = "stats_top_list", tag = "top-list") + examples = @Example(file = "stats_top", tag = "top") ) - public TopList( + public Top( Source source, @Param( name = "field", @@ -69,7 +69,7 @@ public TopList( super(source, field, Arrays.asList(limit, order)); } - private TopList(StreamInput in) throws IOException { + private Top(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), @@ -156,26 +156,26 @@ public DataType dataType() { } @Override - protected NodeInfo info() { - return NodeInfo.create(this, TopList::new, children().get(0), children().get(1), children().get(2)); + protected NodeInfo info() { + return NodeInfo.create(this, Top::new, children().get(0), children().get(1), children().get(2)); } @Override - public TopList replaceChildren(List newChildren) { - return new TopList(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); + public Top replaceChildren(List newChildren) { + return new Top(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2)); } @Override public AggregatorFunctionSupplier supplier(List inputChannels) { DataType type = field().dataType(); if (type == DataType.LONG || type == DataType.DATETIME) { - return new TopListLongAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopLongAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); } if (type == DataType.INTEGER) { - return new TopListIntAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopIntAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); } if (type == DataType.DOUBLE) { - return new TopListDoubleAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); + return new TopDoubleAggregatorFunctionSupplier(inputChannels, limitValue(), orderValue()); } throw EsqlIllegalArgumentException.illegalDataType(type); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java index a99c7a8b7ac8d..f5b40df6fa619 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java @@ -94,7 +94,7 @@ * to keep all the logic in one place. * <p> * You can find examples of other aggregations using this method, - * like {@link org.elasticsearch.xpack.esql.expression.function.aggregate.TopList#writeTo(PlanStreamOutput)} + * like {@link org.elasticsearch.xpack.esql.expression.function.aggregate.Top#writeTo(PlanStreamOutput)} * </p> * * <ul> * <li> @@ -155,7 +155,7 @@ * </li> * <li> * Create a new StringTemplate file. * Use another as a reference, like - * {@code x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopListAggregator.java.st}. + * {@code x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-TopAggregator.java.st}. * </li> * <li>
* Add the template scripts to {@code x-pack/plugin/esql/compute/build.gradle}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 55a691a165d56..91433e42033c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -36,7 +36,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial; -import org.elasticsearch.xpack.esql.expression.function.aggregate.TopList; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import java.lang.invoke.MethodHandle; @@ -67,7 +67,7 @@ final class AggregateMapper { SpatialCentroid.class, Sum.class, Values.class, - TopList.class, + Top.class, Rate.class, // internal function @@ -154,7 +154,7 @@ private static Stream, Tuple>> typeAndNames(Class } else if (Values.class.isAssignableFrom(clazz)) { // TODO can't we figure this out from the function itself? types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); - } else if (TopList.class.isAssignableFrom(clazz)) { + } else if (Top.class.isAssignableFrom(clazz)) { types = List.of("Int", "Long", "Double"); } else if (Rate.class.isAssignableFrom(clazz)) { types = List.of("Int", "Long", "Double"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java similarity index 82% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListSerializationTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java index 605d240512e65..2906a1e74e72a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java @@ -15,18 +15,18 @@ import java.io.IOException; import java.util.List; -public class TopListSerializationTests extends AbstractExpressionSerializationTests { +public class TopSerializationTests extends AbstractExpressionSerializationTests { @Override - protected TopList createTestInstance() { + protected Top createTestInstance() { Source source = randomSource(); Expression field = randomChild(); Expression limit = randomChild(); Expression order = randomChild(); - return new TopList(source, field, limit, order); + return new Top(source, field, limit, order); } @Override - protected TopList mutateInstance(TopList instance) throws IOException { + protected Top mutateInstance(Top instance) throws IOException { Source source = instance.source(); Expression field = instance.field(); Expression limit = instance.limitField(); @@ -36,7 +36,7 @@ protected TopList mutateInstance(TopList instance) throws IOException { case 1 -> limit = randomValueOtherThan(limit, AbstractExpressionSerializationTests::randomChild); case 2 -> order =
randomValueOtherThan(order, AbstractExpressionSerializationTests::randomChild); } - return new TopList(source, field, limit, order); + return new Top(source, field, limit, order); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java similarity index 88% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java index 33770ff2467ef..7b77decb560a9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopListTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java @@ -22,8 +22,8 @@ import static org.hamcrest.Matchers.equalTo; -public class TopListTests extends AbstractAggregationTestCase { - public TopListTests(@Name("TestCase") Supplier testCaseSupplier) { +public class TopTests extends AbstractAggregationTestCase { + public TopTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -39,7 +39,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.INTEGER, equalTo(List.of(200, 8, 5, 0).subList(0, limit)) ); @@ -52,7 +52,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.LONG, equalTo(List.of(200L, 8L, 5L, 0L).subList(0, limit)) ); @@ -65,7 +65,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.DOUBLE, equalTo(List.of(200., 8., 5., 0.).subList(0, limit)) ); @@ -78,7 +78,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(limit, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.DATETIME, equalTo(List.of(200L, 8L, 5L, 0L).subList(0, limit)) ); @@ -93,7 +93,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - 
"TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.INTEGER, equalTo(200) ) @@ -106,7 +106,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.LONG, equalTo(200L) ) @@ -119,7 +119,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.DOUBLE, equalTo(200.) ) @@ -132,7 +132,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.DATETIME, equalTo(200L) ) @@ -147,7 +147,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.INTEGER, equalTo(200) ) @@ -160,7 +160,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.LONG, equalTo(200L) ) @@ -173,7 +173,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.DOUBLE, equalTo(200.) 
) @@ -186,7 +186,7 @@ public static Iterable parameters() { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "TopList[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", + "Top[field=Attribute[channel=0], limit=Attribute[channel=1], order=Attribute[channel=2]]", DataType.DATETIME, equalTo(200L) ) @@ -244,6 +244,6 @@ public static Iterable parameters() { @Override protected Expression build(Source source, List args) { - return new TopList(source, args.get(0), args.get(1), args.get(2)); + return new Top(source, args.get(0), args.get(1), args.get(2)); } } From 42564fc35fb45736a4a9a56857cdb2f054a227a2 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 1 Jul 2024 21:47:55 +0200 Subject: [PATCH 094/216] Automatically adjust ignore_malformed only for the @timestamp (#109948) We introduced automatic disabling ignore_malformed for the @timestamp field with #99346, but the change was applied to any field with name @timestamp under any path, while it should have been applied only to the top-level @timestamp field. Relates to #107760 --- docs/changelog/109948.yaml | 5 ++++ .../DataStreamTimestampFieldMapperTests.java | 29 +++++++++++++++++++ .../index/mapper/DateFieldMapper.java | 2 +- 3 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/109948.yaml diff --git a/docs/changelog/109948.yaml b/docs/changelog/109948.yaml new file mode 100644 index 0000000000000..3f5a281781bcf --- /dev/null +++ b/docs/changelog/109948.yaml @@ -0,0 +1,5 @@ +pr: 109948 +summary: Automatically adjust `ignore_malformed` only for the @timestamp +area: Mapping +type: bug +issues: [] diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java index c8774b18c7e9e..89625ee8a312e 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/mapper/DataStreamTimestampFieldMapperTests.java @@ -177,11 +177,29 @@ public void testValidateDefaultIgnoreMalformed() throws Exception { b.startObject("@timestamp"); b.field("type", "date"); b.endObject(); + b.startObject("summary"); + { + b.startObject("properties"); + { + b.startObject("@timestamp"); + b.field("type", "date"); + b.endObject(); + } + b.endObject(); + } + b.endObject(); }) ); assertThat(mapperService, notNullValue()); assertThat(mapperService.documentMapper().mappers().getMapper("@timestamp"), notNullValue()); assertThat(((DateFieldMapper) mapperService.documentMapper().mappers().getMapper("@timestamp")).ignoreMalformed(), is(false)); + DateFieldMapper summaryTimestamp = (DateFieldMapper) (mapperService.documentMapper() + .mappers() + .objectMappers() + .get("summary") + .getMapper("@timestamp")); + assertThat(summaryTimestamp, notNullValue()); + assertThat(summaryTimestamp.ignoreMalformed(), is(true)); } { MapperService mapperService = createMapperService( @@ -193,11 +211,22 @@ public void testValidateDefaultIgnoreMalformed() throws Exception { b.field("type", "date"); b.field("ignore_malformed", false); b.endObject(); + b.startObject("summary.@timestamp"); + b.field("type", "date"); + b.field("ignore_malformed", false); + b.endObject(); }) ); assertThat(mapperService, 
notNullValue()); assertThat(mapperService.documentMapper().mappers().getMapper("@timestamp"), notNullValue()); assertThat(((DateFieldMapper) mapperService.documentMapper().mappers().getMapper("@timestamp")).ignoreMalformed(), is(false)); + DateFieldMapper summaryTimestamp = (DateFieldMapper) (mapperService.documentMapper() + .mappers() + .objectMappers() + .get("summary") + .getMapper("@timestamp")); + assertThat(summaryTimestamp, notNullValue()); + assertThat(summaryTimestamp.ignoreMalformed(), is(false)); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 7c5d217768105..501d31547ded1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -359,7 +359,7 @@ public DateFieldMapper build(MapperBuilderContext context) { ); Long nullTimestamp = parseNullValue(ft); - if (leafName().equals(DataStreamTimestampFieldMapper.DEFAULT_PATH) + if (ft.name().equals(DataStreamTimestampFieldMapper.DEFAULT_PATH) && context.isDataStream() && ignoreMalformed.isConfigured() == false) { ignoreMalformed.setValue(false); From bafac221fac3ff0d000c46e187805f6aa3068f4c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 2 Jul 2024 07:13:10 +1000 Subject: [PATCH 095/216] Mute org.elasticsearch.search.vectors.ExactKnnQueryBuilderTests testToQuery #110357 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index e8c6536b0ce0e..78f01713f7351 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -106,6 +106,9 @@ tests: - class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests method: testInfoParameters {class org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial} issue: https://github.com/elastic/elasticsearch/issues/110310 +- class: org.elasticsearch.search.vectors.ExactKnnQueryBuilderTests + method: testToQuery + issue: https://github.com/elastic/elasticsearch/issues/110357 # Examples: # From 80109eb2be91af390ba863d60a05aa98457576a7 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 2 Jul 2024 13:28:50 +1000 Subject: [PATCH 096/216] Allow ack unpromotable refresh request before shard is created (#110221) If a search node lags behind in cluster state, it is possible that a refresh request arrives before the shard is even ready. This in turn fails the shard and leads to unnecessary failure and recovery. This PR allows the search node to ack the refresh request before the shard is created, preventing that from happening. The subsequent recovery process of the search shard should ensure it uses the latest commit data.
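In outline, the fix is an early-return guard at the top of the unpromotable shard operation: when the local node does not yet have the index service or the shard, it acks with an empty response instead of resolving the shard and failing. A condensed sketch of that guard, with names taken from the diff below:

    final var indexService = indicesService.indexService(request.shardId().getIndex());
    final var shard = indexService == null ? null : indexService.getShardOrNull(request.shardId().id());
    if (shard == null) {
        // The search shard does not exist on this node yet; ack immediately.
        // Its later recovery picks up the latest commit from the indexing shard anyway.
        responseListener.onResponse(ActionResponse.Empty.INSTANCE);
        return;
    }

On the happy path the operation still waits for the requested primary term and segment generation, as the full diff shows.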
Depends on: serverless 2277 Resolves: serverless 2258 --- ...ansportUnpromotableShardRefreshAction.java | 12 ++- ...rtUnpromotableShardRefreshActionTests.java | 89 +++++++++++++++++++ 2 files changed, 99 insertions(+), 2 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index 43fbe9513b57b..b4357c69c46ae 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -63,8 +62,17 @@ protected void unpromotableShardOperation( UnpromotableShardRefreshRequest request, ActionListener responseListener ) { + // In edge cases, the search shard may still be in the process of being created when a refresh request arrives. + // We simply respond OK to the request because when the search shard recovers later it will use the latest + // commit from the proper indexing shard. + final var indexService = indicesService.indexService(request.shardId().getIndex()); + final var shard = indexService == null ? null : indexService.getShardOrNull(request.shardId().id()); + if (shard == null) { + responseListener.onResponse(ActionResponse.Empty.INSTANCE); + return; + } + ActionListener.run(responseListener, listener -> { - IndexShard shard = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id()); shard.waitForPrimaryTermAndGeneration( request.getPrimaryTerm(), request.getSegmentGeneration(), diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java new file mode 100644 index 0000000000000..4fcb72a02b83a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshActionTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.action.admin.indices.refresh; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.action.shard.ShardStateAction; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransportUnpromotableShardRefreshActionTests extends ESTestCase { + private ThreadPool threadPool; + private ClusterService clusterService; + + @Override + public void setUp() throws Exception { + super.setUp(); + threadPool = new TestThreadPool("TransportUnpromotableShardRefreshActionTests"); + clusterService = ClusterServiceUtils.createClusterService(threadPool); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + clusterService.close(); + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + } + + public void testRespondOKToRefreshRequestBeforeShardIsCreated() { + final var shardId = new ShardId(new Index(randomIdentifier(), randomUUID()), between(0, 3)); + final var shardRouting = TestShardRouting.newShardRouting(shardId, randomUUID(), true, ShardRoutingState.STARTED); + final var indexShardRoutingTable = new IndexShardRoutingTable.Builder(shardId).addShard(shardRouting).build(); + + final var request = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomBoolean() + ); + + final TransportService transportService = mock(TransportService.class); + when(transportService.getThreadPool()).thenReturn(threadPool); + final IndicesService indicesService = mock(IndicesService.class); + if (randomBoolean()) { + when(indicesService.indexService(shardId.getIndex())).thenReturn(null); + } else { + final IndexService indexService = mock(IndexService.class); + when(indicesService.indexService(shardId.getIndex())).thenReturn(indexService); + when(indexService.hasShard(shardId.id())).thenReturn(false); + } + + final var action = new TransportUnpromotableShardRefreshAction( + clusterService, + transportService, + mock(ShardStateAction.class), + mock(ActionFilters.class), + indicesService + ); + + final PlainActionFuture future = new PlainActionFuture<>(); + action.unpromotableShardOperation(mock(Task.class), request, future); + assertThat(safeGet(future), sameInstance(ActionResponse.Empty.INSTANCE)); + } +} From 48dde48a4ba6b1de9d13336d10307a623d3a8f57 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 2 Jul 2024 08:13:24 +0200 Subject: [PATCH 097/216] Do not trip circuit breaker when getting shutdown status (#110332) Update TransportGetShutdownStatusAction so that it can not trip circuit breaker. 
If the circuit breaker trips while the orchestrator is checking the shutdown status, the cluster is shut down prematurely, leading to loss of availability. --- .../xpack/shutdown/TransportGetShutdownStatusAction.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java index 377016e80f386..33965eca83aee 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java @@ -83,6 +83,7 @@ public TransportGetShutdownStatusAction( ) { super( GetShutdownStatusAction.NAME, + false, transportService, clusterService, threadPool, From f9898c765cf0708bf749bbd89870008734dbeed1 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 2 Jul 2024 09:15:37 +0200 Subject: [PATCH 098/216] [Inference API] Use extractOptionalPositiveInteger instead of removeAsType in OpenAiChatCompletionServiceSettings (#110281) --- .../completion/OpenAiChatCompletionServiceSettings.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java index 04f77da1b1463..c4ab8bd99b8b0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -32,9 +32,9 @@ import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createOptionalUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.ORGANIZATION; /** @@ -58,7 +58,12 @@ public static OpenAiChatCompletionServiceSettings fromMap(Map ma String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); - Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); + Integer maxInputTokens = extractOptionalPositiveInteger( + map, + MAX_INPUT_TOKENS, + ModelConfigurations.SERVICE_SETTINGS, + validationException + ); RateLimitSettings rateLimitSettings = RateLimitSettings.of( map, From cdc9914891960736ddd7f63cf42e8b9554f76b5f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 2 Jul 2024 10:01:38 +0200 Subject: [PATCH 099/216] [Inference API] Deduplicate getPersistedConfigMap (#110277) --- .../elasticsearch/xpack/inference/Utils.java | 7 +++- .../AzureAiStudioServiceTests.java | 15
+------ .../azureopenai/AzureOpenAiServiceTests.java | 22 +---------- .../services/cohere/CohereServiceTests.java | 22 +---------- .../GoogleAiStudioServiceTests.java | 22 +---------- .../GoogleVertexAiServiceTests.java | 24 +----------- .../huggingface/HuggingFaceServiceTests.java | 39 ++++--------------- .../services/mistral/MistralServiceTests.java | 15 +------ .../services/openai/OpenAiServiceTests.java | 23 +---------- 9 files changed, 21 insertions(+), 168 deletions(-) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index ec36040507ccd..fe33a3d092667 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -178,13 +178,18 @@ public static PersistedConfig getPersistedConfigMap( Map taskSettings, Map secretSettings ) { + var secrets = secretSettings == null ? null : new HashMap(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)); return new PersistedConfig( new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) + secrets ); } + public static PersistedConfig getPersistedConfigMap(Map serviceSettings) { + return Utils.getPersistedConfigMap(serviceSettings, new HashMap<>(), null); + } + public static PersistedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { return new PersistedConfig( new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index d26b02ddba62b..4cc91249ad244 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -60,8 +59,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.Utils.PersistedConfig; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; +import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -1069,18 +1068,6 @@ private Map getRequestConfigMap( ); } - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, 
taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) - ); - } - private static Map getEmbeddingsServiceSettingsMap( String target, String provider, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index c3e8eb5c621d2..57bae3f172e6d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -54,8 +53,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.Utils.PersistedConfig; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; +import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -1159,23 +1158,4 @@ private Map getRequestConfigMap( Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings) ); } - - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) - ); - } - - private PersistedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - null - ); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 174bb4dfed109..f9db4313dcead 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -58,8 +57,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.Utils.PersistedConfig; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; +import static 
org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -1373,23 +1372,4 @@ private CohereService createCohereService() { return new CohereService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool)); } - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) - ); - } - - private PersistedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - null - ); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index 45dd8ad7b33bd..f807f8e5205b2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -57,8 +56,8 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.inference.Utils.PersistedConfig; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; +import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -945,23 +944,4 @@ private Map getRequestConfigMap( private GoogleAiStudioService createGoogleAiStudioService() { return new GoogleAiStudioService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool)); } - - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) - ); - } - - private PersistedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - null - ); - } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java index 614ce71c36078..d8c727c5a58bc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockWebServer; @@ -36,6 +35,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; @@ -574,28 +574,6 @@ private Map getRequestConfigMap( ); } - // TODO: deduplicate - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) - ); - } - - private PersistedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - null - ); - } - - private record PersistedConfig(Map config, Map secrets) {} - private static Map getSecretSettingsMap(String serviceAccountJson) { return new HashMap<>(Map.of(GoogleVertexAiSecretSettings.SERVICE_ACCOUNT_JSON, serviceAccountJson)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index 14fe1451ebace..61504603e62ee 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; @@ -22,7 +21,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; 
@@ -56,7 +54,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResultsTests.asMapWithListsInsteadOfArrays; -import static org.elasticsearch.xpack.inference.Utils.PersistedConfig; +import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -198,7 +196,7 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModel() throws IOException { try (var service = createHuggingFaceService()) { - var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), new HashMap<>(), getSecretSettingsMap("secret")); var model = service.parsePersistedConfigWithSecrets( "id", @@ -217,7 +215,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModel() throw public void testParsePersistedConfigWithSecrets_CreatesAnElserModel() throws IOException { try (var service = createHuggingFaceService()) { - var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), new HashMap<>(), getSecretSettingsMap("secret")); var model = service.parsePersistedConfigWithSecrets( "id", @@ -236,7 +234,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnElserModel() throws IOE public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { try (var service = createHuggingFaceService()) { - var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), new HashMap<>(), getSecretSettingsMap("secret")); persistedConfig.config().put("extra_key", "value"); var model = service.parsePersistedConfigWithSecrets( @@ -259,7 +257,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists var secretSettingsMap = getSecretSettingsMap("secret"); secretSettingsMap.put("extra_key", "value"); - var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), secretSettingsMap); + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), new HashMap<>(), secretSettingsMap); var model = service.parsePersistedConfigWithSecrets( "id", @@ -278,7 +276,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecrets() throws IOException { try (var service = createHuggingFaceService()) { - var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), new HashMap<>(), getSecretSettingsMap("secret")); persistedConfig.secrets().put("extra_key", "value"); var model = service.parsePersistedConfigWithSecrets( @@ -301,7 +299,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists var serviceSettingsMap = getServiceSettingsMap("url"); serviceSettingsMap.put("extra_key", "value"); - var 
persistedConfig = getPersistedConfigMap(serviceSettingsMap, getSecretSettingsMap("secret")); + var persistedConfig = getPersistedConfigMap(serviceSettingsMap, new HashMap<>(), getSecretSettingsMap("secret")); var model = service.parsePersistedConfigWithSecrets( "id", @@ -356,7 +354,7 @@ public void testParsePersistedConfig_CreatesAnEmbeddingsModel() throws IOExcepti public void testParsePersistedConfig_CreatesAnElserModel() throws IOException { try (var service = createHuggingFaceService()) { - var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url")); + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), new HashMap<>()); var model = service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config()); @@ -717,25 +715,4 @@ private Map getRequestConfigMap(Map serviceSetti return new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings)); } - private PersistedConfig getPersistedConfigMap(Map serviceSettings) { - return getPersistedConfigMap(serviceSettings, Map.of(), null); - } - - private PersistedConfig getPersistedConfigMap(Map serviceSettings, @Nullable Map secretSettings) { - return getPersistedConfigMap(serviceSettings, Map.of(), secretSettings); - } - - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - var secrets = secretSettings == null ? null : new HashMap(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)); - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - secrets - ); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java index ba37203d9e5d6..1e3dd1e348f55 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -54,8 +53,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.Utils.PersistedConfig; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; +import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -587,18 +586,6 @@ private Map getRequestConfigMap( ); } - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) - ); - } - private static Map 
getEmbeddingsServiceSettingsMap( String model, @Nullable Integer dimensions, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 2fc049dd3a5f6..9e35180547bf2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -55,8 +54,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.Utils.PersistedConfig; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; +import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; @@ -1308,24 +1307,4 @@ private Map getRequestConfigMap( Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings) ); } - - private PersistedConfig getPersistedConfigMap( - Map serviceSettings, - Map taskSettings, - Map secretSettings - ) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) - ); - } - - private PersistedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { - - return new PersistedConfig( - new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), - null - ); - } } From 80caf2e37c588d05170045dbfce0ebbfc6fde359 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 2 Jul 2024 10:11:15 +0200 Subject: [PATCH 100/216] [Inference API] Use extractOptionalPositiveInteger instead of remove as type in HuggingFaceServiceSettings (#110278) --- .../huggingface/HuggingFaceServiceSettings.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index fc31b1e518dd9..eb9c99f5bfd91 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -33,9 +33,9 @@ import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; public class HuggingFaceServiceSettings extends FilteredXContentObject implements ServiceSettings, HuggingFaceRateLimitServiceSettings { public static final String NAME = "hugging_face_service_settings"; @@ -49,8 +49,13 @@ public static HuggingFaceServiceSettings fromMap(Map map, Config var uri = extractUri(map, URL, validationException); SimilarityMeasure similarityMeasure = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); - Integer dims = removeAsType(map, DIMENSIONS, Integer.class); - Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); + Integer dims = extractOptionalPositiveInteger(map, DIMENSIONS, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer maxInputTokens = extractOptionalPositiveInteger( + map, + MAX_INPUT_TOKENS, + ModelConfigurations.SERVICE_SETTINGS, + validationException + ); RateLimitSettings rateLimitSettings = RateLimitSettings.of( map, DEFAULT_RATE_LIMIT_SETTINGS, From 3f4bb15bdbcaf79ee8fa0fe84dde3dca6a827259 Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Tue, 2 Jul 2024 11:04:22 +0200 Subject: [PATCH 101/216] Support ignore_above on keyword dimensions (#110337) At the moment, it's not possible to set `ignore_above` on `keyword` dimension fields. This flag is used a lot in our integrations and in ECS. It helps to reduce the number of logs/metrics we need to reject. This is also somewhat inconsistent as it's possible to set `ignore_malformed` on numeric dimension fields. Both options end up adding malformed values to `_ignored` and don't add doc_values and postings. I'd like to propose adding support for `ignore_above` to be consistent with allowing `ignore_malformed` on numeric fields. The other option would be to take away the support for `ignore_malformed` but that seems to be a breaking change. I've added a test that verifies that ignoring those dimensions is safe in the sense that it doesn't lead to document rejections due to duplicate detection if the only dimensions that differ are ignored. 
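The combination this enables, condensed from the updated unit test in the diff below (the field name and the ignore_above value of 2048 are just the test's choices):

    // A keyword field may now declare both time_series_dimension and ignore_above;
    // over-long values land in _ignored instead of rejecting the whole document.
    DocumentMapper documentMapper = createDocumentMapper(fieldMapping(b -> {
        minimalMapping(b);
        b.field("time_series_dimension", true).field("ignore_above", 2048);
    }));
    KeywordFieldMapper field = (KeywordFieldMapper) documentMapper.mappers().getMapper("field");
    assertEquals(2048, field.fieldType().ignoreAbove());

Before this change the two parameters precluded each other and mapping parsing failed.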
--- docs/changelog/110337.yaml | 5 + .../test/data_stream/150_tsdb.yml | 94 +++++++++++++++++++ .../index/mapper/KeywordFieldMapper.java | 5 +- .../index/mapper/MapperFeatures.java | 3 +- .../index/mapper/KeywordFieldMapperTests.java | 12 +-- 5 files changed, 110 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/110337.yaml diff --git a/docs/changelog/110337.yaml b/docs/changelog/110337.yaml new file mode 100644 index 0000000000000..bf21a95c9157f --- /dev/null +++ b/docs/changelog/110337.yaml @@ -0,0 +1,5 @@ +pr: 110337 +summary: Support `ignore_above` on keyword dimensions +area: TSDB +type: enhancement +issues: [] diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 0f7752bd43bd2..8c23232bb457c 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -958,3 +958,97 @@ passthrough objects with duplicate priority: resource.attributes: type: passthrough priority: 1 + +--- +dimensions with ignore_malformed and ignore_above: + - requires: + cluster_features: ["mapper.keyword_dimension_ignore_above"] + reason: support for ignore_above on keyword dimensions + - do: + allowed_warnings: + - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" + indices.put_index_template: + name: my-dynamic-template + body: + index_patterns: [k9s*] + data_stream: {} + template: + settings: + index: + number_of_shards: 1 + mode: time_series + time_series: + start_time: 2023-08-31T13:03:08.138Z + + mappings: + properties: + attributes: + type: passthrough + time_series_dimension: true + priority: 0 + properties: + keyword_dim: + type: keyword + keyword_dim_ignored: + type: keyword + ignore_above: 2 + long_dim_ignored: + type: long + ignore_malformed: true + data: + type: long + time_series_metric: gauge + ignore_malformed: true + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": 10, "attributes.keyword_dim": "foo", "attributes.keyword_dim_ignored": "ignored", "attributes.long_dim_ignored": "ignored" }' + - '{ "create": { } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": 20, "attributes.keyword_dim": "foo", "attributes.keyword_dim_ignored": "ignored too", "attributes.long_dim_ignored": "ignored" }' + - '{ "create": { } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": 30, "attributes.keyword_dim_ignored": "ignored 3" }' + - '{ "create": { } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": 40, "attributes.keyword_dim_ignored": "ignored 4" }' + - '{ "create": { } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": 50, "attributes.keyword_dim_ignored": "duplicate" }' + - '{ "create": { } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": 60, "attributes.keyword_dim_ignored": "duplicate" }' + - match: { errors: true } + - match: { items.0.create.result: created } + - match: { items.1.create.result: created } + - match: { items.2.create.result: created } + - match: { items.3.create.result: created } + - match: { items.4.create.result: created } + - match: { items.5.create.error.type: 
version_conflict_engine_exception } + + - do: + search: + index: k9s + body: + sort: + - data: asc + + - match: { hits.total.value: 5 } + - match: { hits.hits.0._ignored: ["attributes.keyword_dim_ignored", "attributes.long_dim_ignored"]} + - match: { hits.hits.1._ignored: ["attributes.keyword_dim_ignored", "attributes.long_dim_ignored"]} + - match: { hits.hits.2._ignored: ["attributes.keyword_dim_ignored"]} + - match: { hits.hits.3._ignored: ["attributes.keyword_dim_ignored"]} + - match: { hits.hits.4._ignored: ["attributes.keyword_dim_ignored"]} + + - do: + search: + index: k9s + body: + size: 0 + aggs: + keyword_dims: + terms: + field: keyword_dim + + - length: { aggregations.keyword_dims.buckets: 1 } + - match: { aggregations.keyword_dims.buckets.0.key: "foo" } + - match: { aggregations.keyword_dims.buckets.0.doc_count: 2 } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 34c518a93404b..16aa827e6a251 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -86,6 +87,8 @@ public final class KeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "keyword"; + static final NodeFeature KEYWORD_DIMENSION_IGNORE_ABOVE = new NodeFeature("mapper.keyword_dimension_ignore_above"); + public static class Defaults { public static final FieldType FIELD_TYPE; @@ -210,7 +213,7 @@ public Builder(String name, IndexAnalyzers indexAnalyzers, ScriptCompiler script + "] are true" ); } - }).precludesParameters(normalizer, ignoreAbove); + }).precludesParameters(normalizer); } public Builder(String name, IndexVersion indexCreatedVersion) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index a8a81fab654da..755c2d94571d3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -27,7 +27,8 @@ public Set getFeatures() { SourceFieldMapper.SYNTHETIC_SOURCE_FALLBACK, DenseVectorFieldMapper.INT4_QUANTIZATION, DenseVectorFieldMapper.BIT_VECTORS, - DocumentMapper.INDEX_SORTING_ON_NESTED + DocumentMapper.INDEX_SORTING_ON_NESTED, + KeywordFieldMapper.KEYWORD_DIMENSION_IGNORE_ABOVE ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index afebe1a008468..67cd92477eedb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -320,15 +320,13 @@ public void testDimension() throws IOException { assertDimension(false, KeywordFieldMapper.KeywordFieldType::isDimension); } - public void testDimensionAndIgnoreAbove() { - Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> { + public 
void testDimensionAndIgnoreAbove() throws IOException { + DocumentMapper documentMapper = createDocumentMapper(fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", true).field("ignore_above", 2048); - }))); - assertThat( - e.getCause().getMessage(), - containsString("Field [ignore_above] cannot be set in conjunction with field [time_series_dimension]") - ); + })); + KeywordFieldMapper field = (KeywordFieldMapper) documentMapper.mappers().getMapper("field"); + assertEquals(2048, field.fieldType().ignoreAbove()); } public void testDimensionAndNormalizer() { From 94fe32ab208f45b8d1acdc12b0e9c28c92206a86 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 2 Jul 2024 19:31:28 +1000 Subject: [PATCH 102/216] [Test] Reuse existing TLS setup infrastructure in tests (#110358) Relates: #109899 Resolves: #110356 --- x-pack/plugin/security/build.gradle | 5 ----- .../SecurityNetty4TransportCloseNotifyIT.java | 20 ++++--------------- ...y4HttpServerTransportCloseNotifyTests.java | 15 +++++--------- 3 files changed, 9 insertions(+), 31 deletions(-) diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 0bba1e845b338..07308d5d29a9a 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -151,11 +151,6 @@ dependencies { testImplementation('org.apache.directory.server:ldap-client-test:2.0.0-M24') testImplementation('org.apache.directory.server:apacheds-interceptor-kerberos:2.0.0-M24') testImplementation('org.apache.directory.mavibot:mavibot:1.0.0-M8') - - // netty self signed certificate dependency - testImplementation('org.bouncycastle:bcprov-jdk18on:1.78.1') - testImplementation ('org.bouncycastle:bcutil-jdk18on:1.78.1') - testImplementation('org.bouncycastle:bcpkix-jdk18on:1.78.1') } tasks.named("test").configure { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java index f09007bebd80c..8a7bd0af817f7 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java @@ -25,18 +25,15 @@ import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.SslHandler; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; -import io.netty.handler.ssl.util.SelfSignedCertificate; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.support.CancellableActionTestPlugin; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; import java.util.Collection; @@ -45,12 +42,11 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForNodePEMFiles; import static 
org.elasticsearch.test.TaskAssertions.assertAllTasksHaveFinished; import static org.elasticsearch.test.rest.ESRestTestCase.basicAuthHeaderValue; @ClusterScope(numDataNodes = 0, scope = Scope.TEST) -@ESTestCase.WithoutSecurityManager -@SuppressForbidden(reason = "requires java.io.File for netty self-signed certificate") public class SecurityNetty4TransportCloseNotifyIT extends SecurityIntegTestCase { @Override @@ -60,17 +56,9 @@ protected boolean addMockHttpTransport() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - try { - var ssc = new SelfSignedCertificate(); - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.key", ssc.privateKey().getPath()) - .put("xpack.security.http.ssl.certificate", ssc.certificate().getPath()) - .build(); - } catch (Exception e) { - throw new RuntimeException(e); - } + final Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); + addSSLSettingsForNodePEMFiles(builder, "xpack.security.http.", randomBoolean()); + return builder.put("xpack.security.http.ssl.enabled", true).build(); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java index e61f1e4e21661..0ac6ddc8245a1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java @@ -25,11 +25,9 @@ import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.SslHandler; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; -import io.netty.handler.ssl.util.SelfSignedCertificate; import io.netty.util.concurrent.Future; import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.SuppressForbidden; @@ -62,6 +60,8 @@ import javax.net.ssl.SSLException; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForNodePEMFiles; + @ESTestCase.WithoutSecurityManager @SuppressForbidden(reason = "requires java.io.File for netty self-signed certificate") public class SecurityNetty4HttpServerTransportCloseNotifyTests extends AbstractHttpServerTransportTestCase { @@ -93,17 +93,12 @@ private static void safeAwait(Future nettyFuture) { * The server will not reply to request automatically, to send response poll the queue. 
*/ private HttpServer setupHttpServer(String tlsProtocols) throws CertificateException { - var ssc = new SelfSignedCertificate(); var threadPool = new TestThreadPool("tls-close-notify"); var dispatcher = new QueuedDispatcher(); - var secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode"); - var settings = Settings.builder() - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.key", ssc.privateKey().getPath()) - .put("xpack.security.http.ssl.certificate", ssc.certificate().getPath()) + final Settings.Builder builder = Settings.builder(); + addSSLSettingsForNodePEMFiles(builder, "xpack.security.http.", randomBoolean()); + var settings = builder.put("xpack.security.http.ssl.enabled", true) .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) .put("xpack.security.http.ssl.supported_protocols", tlsProtocols) .build(); var env = TestEnvironment.newEnvironment(settings); From 58b73c1a5adaa6193a1418b25b91ba4b3f884924 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Tue, 2 Jul 2024 11:44:11 +0200 Subject: [PATCH 103/216] Remove test logging for closed issue (#110367) Relates #108237. --- .../elasticsearch/snapshots/ConcurrentSnapshotsIT.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 45b9828e3dbaa..e03fafd5646e3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -46,7 +46,6 @@ import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; -import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.RemoteTransportException; @@ -675,13 +674,6 @@ public void testQueuedOperationsOnMasterRestart() throws Exception { awaitNoMoreRunningOperations(); } - @TestIssueLogging( - issueUrl = "https://github.com/elastic/elasticsearch/issues/108237", - value = "org.elasticsearch.snapshots.SnapshotsService:DEBUG," - + "org.elasticsearch.cluster.service.MasterService:DEBUG," - + "org.elasticsearch.repositories.blobstore.BlobStoreRepository:DEBUG," - + "org.elasticsearch.snapshots.mockstore.MockRepository:DEBUG" - ) public void testQueuedOperationsOnMasterDisconnect() throws Exception { internalCluster().startMasterOnlyNodes(3); final String dataNode = internalCluster().startDataOnlyNode(); From e0d71d660d6fa02992d5d02972412cee8c8dac7d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 2 Jul 2024 12:19:09 +0200 Subject: [PATCH 104/216] Disallow index.time_series.end_time setting from being set or updated in normal indices (#110268) The index.mode setting validates other index settings. When the index.time_series.end_time setting is updated and the index.mode setting wasn't defined at index creation time (meaning that the default is active), this validation is skipped, which results in (worse) errors at a later point in time. This problem is fixed by making the index.mode setting a dependency of the index.time_series.end_time setting.
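The dependency takes effect through the `Setting.Validator#settings()` hook: every setting listed there is resolved and handed to the cross-setting `validate` overload, so the check runs even when only the end-time setting is being updated. Below is a simplified, self-contained sketch of that wiring; the class name `EndTimeValidatorSketch` is invented for illustration, while the calls and the error message mirror the `IndexSettings` diff further down.

```java
import java.time.Instant;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettings;

// Sketch only: the production logic lives in the anonymous validator of
// IndexSettings.TIME_SERIES_END_TIME, shown in the diff below.
class EndTimeValidatorSketch implements Setting.Validator<Instant> {

    @Override
    public void validate(Instant value) {}

    @Override
    public void validate(Instant value, Map<Setting<?>, Object> settings) {
        // index.mode is present in this map only because settings() below
        // declares it as a dependency of index.time_series.end_time.
        IndexMode indexMode = (IndexMode) settings.get(IndexSettings.MODE);
        if (indexMode != IndexMode.TIME_SERIES) {
            throw new IllegalArgumentException(
                "[index.time_series.end_time] requires [index.mode=time_series]"
            );
        }
    }

    @Override
    public Iterator<Setting<?>> settings() {
        // Declaring MODE here is what makes the check above fire when the
        // end-time setting is set or updated on its own.
        return List.<Setting<?>>of(IndexSettings.MODE).iterator();
    }
}
```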
Note that this problem doesn't exist for the index.time_series.start_time and index.routing_path index settings, because these index settings are final, which means they can only be defined when an index is being created. Closes #110265 --- docs/changelog/110268.yaml | 6 ++ .../DataStreamIndexSettingsProviderTests.java | 62 +++++++++++++++---- ...etadataDataStreamRolloverServiceTests.java | 9 +-- .../rest-api-spec/test/tsdb/10_settings.yml | 61 ++++++++++++++++++ .../MetadataIndexTemplateService.java | 4 +- .../elasticsearch/index/IndexSettings.java | 13 +++- 6 files changed, 133 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/110268.yaml diff --git a/docs/changelog/110268.yaml b/docs/changelog/110268.yaml new file mode 100644 index 0000000000000..adfb467f92e8b --- /dev/null +++ b/docs/changelog/110268.yaml @@ -0,0 +1,6 @@ +pr: 110268 +summary: Disallow index.time_series.end_time setting from being set or updated in normal indices +area: TSDB +type: bug +issues: + - 110265 diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index 85f0d354576a4..cf6911850921b 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -83,7 +83,11 @@ public void testGetAdditionalIndexSettings() throws Exception { settings, List.of(new CompressedXContent(mapping)) ); - assertThat(result.size(), equalTo(3)); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + result = builder().put(result).put("index.mode", "time_series").build(); + assertThat(result.size(), equalTo(4)); + assertThat(IndexSettings.MODE.get(result), equalTo(IndexMode.TIME_SERIES)); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); assertThat(IndexMetadata.INDEX_ROUTING_PATH.get(result), contains("field3")); @@ -124,7 +128,11 @@ public void testGetAdditionalIndexSettingsIndexRoutingPathAlreadyDefined() throw settings, List.of(new CompressedXContent(mapping)) ); - assertThat(result.size(), equalTo(2)); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + result = builder().put(result).put("index.mode", "time_series").build(); + assertThat(result.size(), equalTo(3)); + assertThat(result.get(IndexSettings.MODE.getKey()), equalTo("time_series")); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); } @@ -190,7 +198,11 @@ public void testGetAdditionalIndexSettingsMappingsMerging() throws Exception { settings, List.of(new CompressedXContent(mapping1), new CompressedXContent(mapping2), new CompressedXContent(mapping3)) ); - assertThat(result.size(),
equalTo(3)); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + result = builder().put(result).put("index.mode", "time_series").build(); + assertThat(result.size(), equalTo(4)); + assertThat(IndexSettings.MODE.get(result), equalTo(IndexMode.TIME_SERIES)); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); assertThat(IndexMetadata.INDEX_ROUTING_PATH.get(result), containsInAnyOrder("field1", "field3")); @@ -211,7 +223,11 @@ public void testGetAdditionalIndexSettingsNoMappings() { settings, List.of() ); - assertThat(result.size(), equalTo(2)); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + result = builder().put(result).put("index.mode", "time_series").build(); + assertThat(result.size(), equalTo(3)); + assertThat(result.get(IndexSettings.MODE.getKey()), equalTo("time_series")); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); } @@ -232,7 +248,11 @@ public void testGetAdditionalIndexSettingsLookAheadTime() throws Exception { settings, List.of(new CompressedXContent("{}")) ); - assertThat(result.size(), equalTo(2)); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + result = builder().put(result).put("index.mode", "time_series").build(); + assertThat(result.size(), equalTo(3)); + assertThat(result.get(IndexSettings.MODE.getKey()), equalTo("time_series")); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(lookAheadTime.getMillis()))); } @@ -253,7 +273,11 @@ public void testGetAdditionalIndexSettingsLookBackTime() throws Exception { settings, List.of(new CompressedXContent("{}")) ); - assertThat(result.size(), equalTo(2)); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + result = builder().put(result).put("index.mode", "time_series").build(); + assertThat(result.size(), equalTo(3)); + assertThat(result.get(IndexSettings.MODE.getKey()), equalTo("time_series")); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(lookBackTime.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); } @@ -363,7 +387,11 @@ public void testGetAdditionalIndexSettingsMigrateToTsdb() { settings, List.of() ); - assertThat(result.size(), equalTo(2)); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it 
here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + result = builder().put(result).put("index.mode", "time_series").build(); + assertThat(result.size(), equalTo(3)); + assertThat(result.get(IndexSettings.MODE.getKey()), equalTo("time_series")); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); } @@ -428,7 +456,8 @@ public void testGenerateRoutingPathFromDynamicTemplate() throws Exception { } """; Settings result = generateTsdbSettings(mapping, now); - assertThat(result.size(), equalTo(3)); + assertThat(result.size(), equalTo(4)); + assertThat(IndexSettings.MODE.get(result), equalTo(IndexMode.TIME_SERIES)); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); assertThat(IndexMetadata.INDEX_ROUTING_PATH.get(result), containsInAnyOrder("host.id", "prometheus.labels.*")); @@ -467,7 +496,8 @@ public void testGenerateRoutingPathFromDynamicTemplateWithMultiplePathMatchEntri } """; Settings result = generateTsdbSettings(mapping, now); - assertThat(result.size(), equalTo(3)); + assertThat(result.size(), equalTo(4)); + assertThat(IndexSettings.MODE.get(result), equalTo(IndexMode.TIME_SERIES)); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); assertThat( @@ -516,7 +546,8 @@ public void testGenerateRoutingPathFromDynamicTemplateWithMultiplePathMatchEntri } """; Settings result = generateTsdbSettings(mapping, now); - assertThat(result.size(), equalTo(3)); + assertThat(result.size(), equalTo(4)); + assertThat(IndexSettings.MODE.get(result), equalTo(IndexMode.TIME_SERIES)); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); assertThat( @@ -569,7 +600,8 @@ public void testGenerateRoutingPathFromDynamicTemplate_templateWithNoPathMatch() } """; Settings result = generateTsdbSettings(mapping, now); - assertThat(result.size(), equalTo(3)); + assertThat(result.size(), equalTo(4)); + assertThat(IndexSettings.MODE.get(result), equalTo(IndexMode.TIME_SERIES)); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); assertThat(IndexMetadata.INDEX_ROUTING_PATH.get(result), containsInAnyOrder("host.id", "prometheus.labels.*")); @@ -646,7 +678,8 @@ public void testGenerateRoutingPathFromPassThroughObject() throws Exception { } """; Settings result = generateTsdbSettings(mapping, now); - assertThat(result.size(), equalTo(3)); + assertThat(result.size(), equalTo(4)); + assertThat(IndexSettings.MODE.get(result), equalTo(IndexMode.TIME_SERIES)); assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); 
assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); assertThat(IndexMetadata.INDEX_ROUTING_PATH.get(result), containsInAnyOrder("labels.*")); @@ -657,7 +690,7 @@ private Settings generateTsdbSettings(String mapping, Instant now) throws IOExce String dataStreamName = "logs-app1"; Settings settings = Settings.EMPTY; - return provider.getAdditionalIndexSettings( + var result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, true, @@ -666,6 +699,9 @@ private Settings generateTsdbSettings(String mapping, Instant now) throws IOExce settings, List.of(new CompressedXContent(mapping)) ); + // The index.time_series.end_time setting requires index.mode to be set to time_series adding it here so that we read this setting: + // (in production the index.mode setting is usually provided in an index or component template) + return builder().put(result).put("index.mode", "time_series").build(); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java index 86f6dea220e84..8156345b83b4c 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; @@ -398,11 +397,9 @@ public void testRolloverClusterStateWithBrokenOlderTsdbDataStream() throws Excep for (int i = 0; i < numberOfBackingIndices; i++) { var im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(i)); - var startTime1 = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); - var endTime1 = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); - assertThat(startTime1.toEpochMilli(), equalTo(DateUtils.MAX_MILLIS_BEFORE_MINUS_9999)); - assertThat(endTime1.toEpochMilli(), equalTo(DateUtils.MAX_MILLIS_BEFORE_9999)); - assertThat(im.getIndexMode(), equalTo(null)); + assertThat(im.getTimeSeriesStart(), nullValue()); + assertThat(im.getTimeSeriesEnd(), nullValue()); + assertThat(im.getIndexMode(), nullValue()); } { var im = rolloverMetadata.index( diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml index 485b5b1796ec4..46476fd071b30 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/10_settings.yml @@ -336,6 +336,67 @@ set start_time and end_time without timeseries mode: time_series: end_time: 1632625782000 +--- +set start_time, end_time and routing_path via put settings api without time_series mode: + - requires: + cluster_features: [ "gte_v8.15.0" ] + reason: bug fixed in 8.15.0 + + - do: + indices.create: + index: test-index + - match: { acknowledged: true } + + - do: + catch: /\[index.time_series.end_time\] requires \[index.mode=time_series\]/ + 
indices.put_settings: + index: test-index + body: + index.time_series.end_time: 1632625782000 + + - do: + catch: /Can't update non dynamic settings \[\[index.time_series.start_time\]\] for open indices/ + indices.put_settings: + index: test-index + body: + index.time_series.start_time: 1632625782000 + + - do: + catch: /Can't update non dynamic settings \[\[index.routing_path\]\] for open indices/ + indices.put_settings: + index: test-index + body: + settings: + index: + routing_path: foo + + - do: + indices.close: + index: test-index + + - do: + catch: /\[index.time_series.end_time\] requires \[index.mode=time_series\]/ + indices.put_settings: + index: test-index + body: + index.time_series.end_time: 1632625782000 + + - do: + catch: /final test-index setting \[index.time_series.start_time\], not updateable/ + indices.put_settings: + index: test-index + body: + index.time_series.start_time: 1632625782000 + + - do: + catch: /final test-index setting \[index.routing_path\], not updateable/ + indices.put_settings: + index: test-index + body: + settings: + index: + routing_path: foo + --- set bad start_time and end_time: - requires: diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index e9da9629cb6a3..e9658e71f895e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -714,7 +714,9 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT ) ); } - // Then apply settings resolved from templates: + // Then apply setting from component templates: + finalSettings.put(combinedSettings); + // Then finally apply settings resolved from index template: finalSettings.put(finalTemplate.map(Template::settings).orElse(Settings.EMPTY)); var templateToValidate = indexTemplate.toBuilder() diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 5d864e4fa1e24..1e718fba0d08d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -618,16 +618,25 @@ public void validate(Instant value) {} @Override public void validate(Instant value, Map, Object> settings) { - @SuppressWarnings("unchecked") Instant startTime = (Instant) settings.get(TIME_SERIES_START_TIME); if (startTime.toEpochMilli() > value.toEpochMilli()) { throw new IllegalArgumentException("index.time_series.end_time must be larger than index.time_series.start_time"); } + + // The index.time_series.end_time setting can only be specified if the index.mode setting has been set to time_series + // This check here is specifically needed because in case of updating index settings the validation the gets executed + // in IndexSettings constructor when reading the index.mode setting doesn't get executed. 
+ IndexMode indexMode = (IndexMode) settings.get(MODE); + if (indexMode != IndexMode.TIME_SERIES) { + throw new IllegalArgumentException( + "[" + TIME_SERIES_END_TIME.getKey() + "] requires [index.mode=" + IndexMode.TIME_SERIES + "]" + ); + } } @Override public Iterator> settings() { - List> settings = List.of(TIME_SERIES_START_TIME); + List> settings = List.of(TIME_SERIES_START_TIME, MODE); return settings.iterator(); } }, From 2441340ebb0d78af6c6b4a624d0e3c8cdd09abd1 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 2 Jul 2024 13:05:30 +0200 Subject: [PATCH 105/216] Run terms concurrently when cardinality is only lower than shard size (#110369) To make sure the results are the same between concurrent and non-concurrent runs, we need to exclude situations where shard size == cardinality. --- docs/changelog/110369.yaml | 6 ++++++ .../bucket/terms/TermsAggregationBuilder.java | 3 ++- .../search/aggregations/bucket/TermsTests.java | 4 ++-- .../multiterms/MultiTermsAggregationBuilderTests.java | 8 ++++---- 4 files changed, 14 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/110369.yaml diff --git a/docs/changelog/110369.yaml b/docs/changelog/110369.yaml new file mode 100644 index 0000000000000..770294605b444 --- /dev/null +++ b/docs/changelog/110369.yaml @@ -0,0 +1,6 @@ +pr: 110369 +summary: Run terms concurrently when cardinality is only lower than shard size +area: Aggregations +type: bug +issues: + - 105505 diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java index 7190589de38c4..bf923339c73f5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregationBuilder.java @@ -172,7 +172,8 @@ public static boolean supportsParallelCollection(long cardinality, BucketOrder o return cardinality <= KEY_ORDER_CONCURRENCY_THRESHOLD; } BucketCountThresholds adjusted = TermsAggregatorFactory.adjustBucketCountThresholds(bucketCountThresholds, order); - return cardinality <= adjusted.getShardSize(); + // for cardinality equal to shard size, we don't know if there were more terms when merging. 
+ return cardinality < adjusted.getShardSize(); } return false; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java index d431a3a156957..7243db95bf826 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java @@ -211,8 +211,8 @@ public boolean supportsParallelCollection(ToLongFunction fieldCardinalit { TermsAggregationBuilder terms = new TermsAggregationBuilder("terms"); terms.shardSize(10); - assertTrue(terms.supportsParallelCollection(field -> randomIntBetween(1, 10))); - assertFalse(terms.supportsParallelCollection(field -> randomIntBetween(11, 100))); + assertTrue(terms.supportsParallelCollection(field -> randomIntBetween(1, 9))); + assertFalse(terms.supportsParallelCollection(field -> randomIntBetween(10, 100))); } { TermsAggregationBuilder terms = new TermsAggregationBuilder("terms"); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java index ff345b1dac59c..7eea5b0f741c2 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilderTests.java @@ -161,10 +161,10 @@ public void testSupportsParallelCollection() { List fields = new ArrayList<>(); assertTrue(builder.supportsParallelCollection(field -> { fields.add(field); - return randomIntBetween(0, 10); + return randomIntBetween(0, 9); })); assertEquals(List.of("field1", "field2"), fields); - assertFalse(builder.supportsParallelCollection(field -> randomIntBetween(11, 100))); + assertFalse(builder.supportsParallelCollection(field -> randomIntBetween(10, 100))); terms.terms( List.of( sourceBuilder1.build(), @@ -183,14 +183,14 @@ public void testSupportsParallelCollection() { List.of(sourceBuilder1.build(), sourceBuilder2.build()) ); terms.shardSize(10); - assertTrue(terms.supportsParallelCollection(field -> randomIntBetween(0, 10))); + assertTrue(terms.supportsParallelCollection(field -> randomIntBetween(0, 9))); terms.subAggregation(new TermsAggregationBuilder("name") { @Override public boolean supportsParallelCollection(ToLongFunction fieldCardinalityResolver) { return false; } }); - assertFalse(terms.supportsParallelCollection(field -> randomIntBetween(0, 10))); + assertFalse(terms.supportsParallelCollection(field -> randomIntBetween(0, 9))); } { MultiValuesSourceFieldConfig.Builder sourceBuilder1 = new MultiValuesSourceFieldConfig.Builder(); From d288dbf94ee8b327ead57861e012186104d692ae Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Tue, 2 Jul 2024 08:07:35 -0400 Subject: [PATCH 106/216] Fix Semantic Query Parameter Formatting (#110355) --- docs/reference/query-dsl/semantic-query.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/query-dsl/semantic-query.asciidoc b/docs/reference/query-dsl/semantic-query.asciidoc index 23bcb4a52ef38..d0eb2da95ebc6 100644 --- a/docs/reference/query-dsl/semantic-query.asciidoc +++ b/docs/reference/query-dsl/semantic-query.asciidoc @@ -32,11 +32,11 @@ GET my-index-000001/_search [[semantic-query-params]] ==== 
Top-level parameters for `semantic` -field:: +`field`:: (Required, string) The `semantic_text` field to perform the query on. -query:: +`query`:: (Required, string) The query text to be searched for on the field. From 390439ad9f1630894feb774e65821f200dfc1a1f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 2 Jul 2024 14:47:14 +0200 Subject: [PATCH 107/216] [Inference API] Add Google Vertex AI text embeddings docs (#110317) --- .../inference/delete-inference.asciidoc | 2 +- .../inference/get-inference.asciidoc | 2 +- .../inference/inference-apis.asciidoc | 1 + .../inference/post-inference.asciidoc | 2 +- .../inference/put-inference.asciidoc | 5 +- .../service-google-vertex-ai.asciidoc | 111 ++++++++++++++++++ 6 files changed, 118 insertions(+), 5 deletions(-) create mode 100644 docs/reference/inference/service-google-vertex-ai.asciidoc diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index c4fcb3b7f8379..2f9d9511e6326 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -7,7 +7,7 @@ experimental[] Deletes an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio or +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or Hugging Face. For built-in models and models uploaded though Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 0b45ad92322f6..7f4dc1c496837 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -7,7 +7,7 @@ experimental[] Retrieves {infer} endpoint information. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio or +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or Hugging Face. For built-in models and models uploaded though Eland, the {infer} APIs offer an alternative way to use and manage trained models. 
However, if you do not plan to use the {infer} APIs to use these models or if you want to use diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index f9c41bc0cde81..896cb02a9e699 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -31,6 +31,7 @@ include::service-cohere.asciidoc[] include::service-elasticsearch.asciidoc[] include::service-elser.asciidoc[] include::service-google-ai-studio.asciidoc[] +include::service-google-vertex-ai.asciidoc[] include::service-hugging-face.asciidoc[] include::service-mistral.asciidoc[] include::service-openai.asciidoc[] diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 1414e45c07616..3ad23ac3300cc 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -7,7 +7,7 @@ experimental[] Performs an inference task on an input text by using an {infer} endpoint. IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in -{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio or +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Azure, Google AI Studio, Google Vertex AI or Hugging Face. For built-in models and models uploaded though Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 626721ed146e3..101c0a24b66b7 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -6,7 +6,8 @@ experimental[] Creates an {infer} endpoint to perform an {infer} task. -IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in {ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio or Hugging Face. +IMPORTANT: The {infer} APIs enable you to use certain services, such as built-in +{ml} models (ELSER, E5), models uploaded through Eland, Cohere, OpenAI, Mistral, Azure OpenAI, Google AI Studio, Google Vertex AI or Hugging Face. For built-in models and models uploaded though Eland, the {infer} APIs offer an alternative way to use and manage trained models. However, if you do not plan to use the {infer} APIs to use these models or if you want to use non-NLP models, use the <>. @@ -29,7 +30,6 @@ However, if you do not plan to use the {infer} APIs to use these models or if yo [[put-inference-api-desc]] ==== {api-description-title} - The create {infer} API enables you to create an {infer} endpoint and configure a {ml} model to perform a specific {infer} task. 
The following services are available through the {infer} API, click the links to review the configuration details of the services: @@ -40,6 +40,7 @@ The following services are available through the {infer} API, click the links to * <> (for built-in models and models uploaded through Eland) * <> * <> +* <> * <> * <> * <> diff --git a/docs/reference/inference/service-google-vertex-ai.asciidoc b/docs/reference/inference/service-google-vertex-ai.asciidoc new file mode 100644 index 0000000000000..1e7e2b185a296 --- /dev/null +++ b/docs/reference/inference/service-google-vertex-ai.asciidoc @@ -0,0 +1,111 @@ +[[infer-service-google-vertex-ai]] +=== Google Vertex AI {infer} service + +Creates an {infer} endpoint to perform an {infer} task with the `googlevertexai` service. + + +[discrete] +[[infer-service-google-vertex-ai-api-request]] +==== {api-request-title} + +`PUT /_inference//` + +[discrete] +[[infer-service-google-vertex-ai-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) +include::inference-shared.asciidoc[tag=inference-id] + +``:: +(Required, string) +include::inference-shared.asciidoc[tag=task-type] ++ +-- +Available task types: + +* `text_embedding`. +-- + +[discrete] +[[infer-service-google-vertex-ai-api-request-body]] +==== {api-request-body-title} + +`service`:: +(Required, string) +The type of service supported for the specified task type. In this case, +`googlevertexai`. + +`service_settings`:: +(Required, object) +include::inference-shared.asciidoc[tag=service-settings] ++ +-- +These settings are specific to the `googlevertexai` service. +-- + +`service_account_json`::: +(Required, string) +A valid service account in json format for the Google Vertex AI API. + +`model_id`::: +(Required, string) +The name of the model to use for the {infer} task. +You can find the supported models at https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/text-embeddings-api[Text embeddings API]. + +`location`::: +(Required, string) +The name of the location to use for the {infer} task. +You find the supported locations at https://cloud.google.com/vertex-ai/generative-ai/docs/learn/locations[Generative AI on Vertex AI locations]. + +`project_id`::: +(Required, string) +The name of the project to use for the {infer} task. + +`rate_limit`::: +(Optional, object) +By default, the `googlevertexai` service sets the number of requests allowed per minute to `30.000`. +This helps to minimize the number of rate limit errors returned from Google Vertex AI. +To modify this, set the `requests_per_minute` setting of this object in your service settings: ++ +-- +include::inference-shared.asciidoc[tag=request-per-minute-example] + +More information about the rate limits for Google Vertex AI can be found in the https://cloud.google.com/vertex-ai/docs/quotas[Google Vertex AI Quotas docs]. +-- + +`task_settings`:: +(Optional, object) +include::inference-shared.asciidoc[tag=task-settings] ++ +.`task_settings` for the `text_embedding` task type +[%collapsible%closed] +===== +`auto_truncate`::: +(optional, boolean) +For `googlevertexai` service only. +Specifies if the API truncates inputs longer than the maximum token length automatically. +===== + +[discrete] +[[inference-example-google-vertex-ai]] +==== Google Vertex AI service example + +The following example shows how to create an {infer} endpoint called +`google_vertex_ai_embeddings` to perform a `text_embedding` task type. 
+ +[source,console] +------------------------------------------------------------ +PUT _inference/text_embedding/google_vertex_ai_embeddings +{ + "service": "googlevertexai", + "service_settings": { + "service_account_json": "", + "model_id": "", + "location": "", + "project_id": "" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] From e7c3e353f61aef51e34a888da387f313b731700d Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 2 Jul 2024 14:47:47 +0200 Subject: [PATCH 108/216] [Inference API] Use ObjectParser instead of manual parsing in GoogleVertexAiRerankResponseEntity (#110363) --- .../GoogleVertexAiRerankResponseEntity.java | 74 +++++++++++++++---- 1 file changed, 58 insertions(+), 16 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java index cd37628e8e17e..24946ee5875a5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java @@ -8,7 +8,9 @@ package org.elasticsearch.xpack.inference.external.response.googlevertexai; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -21,10 +23,9 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList; -import static org.elasticsearch.xpack.inference.external.response.XContentUtils.consumeUntilObjectEnd; +import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; -import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterFieldCurrentFlatObj; public class GoogleVertexAiRerankResponseEntity { @@ -90,27 +91,68 @@ public static RankedDocsResults fromResponse(HttpResult response) throws IOExcep positionParserAtTokenAfterField(jsonParser, "records", FAILED_TO_FIND_FIELD_TEMPLATE); - List rankedDocs = parseList(jsonParser, GoogleVertexAiRerankResponseEntity::parseRankedDoc); + var rankedDocs = doParse(jsonParser); return new RankedDocsResults(rankedDocs); } } - private static RankedDocsResults.RankedDoc parseRankedDoc(XContentParser parser, Integer index) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + private static List doParse(XContentParser parser) throws IOException { + return parseList(parser, (listParser, index) -> { + var parsedRankedDoc = RankedDoc.parse(parser); - positionParserAtTokenAfterFieldCurrentFlatObj(parser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); - XContentParser.Token token = 
parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_STRING, token, parser); - String content = parser.text(); + if (parsedRankedDoc.content == null) { + throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.CONTENT.getPreferredName())); + } - positionParserAtTokenAfterFieldCurrentFlatObj(parser, "score", FAILED_TO_FIND_FIELD_TEMPLATE); - token = parser.currentToken(); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); - float score = parser.floatValue(); + if (parsedRankedDoc.score == null) { + throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.SCORE.getPreferredName())); + } - consumeUntilObjectEnd(parser); + return new RankedDocsResults.RankedDoc(index, parsedRankedDoc.score, parsedRankedDoc.content); + }); + } + + private record RankedDoc(@Nullable Float score, @Nullable String content) { + + private static final ParseField CONTENT = new ParseField("content"); + private static final ParseField SCORE = new ParseField("score"); + private static final ObjectParser PARSER = new ObjectParser<>( + "google_vertex_ai_rerank_response", + true, + Builder::new + ); + + static { + PARSER.declareString(Builder::setContent, CONTENT); + PARSER.declareFloat(Builder::setScore, SCORE); + } + + public static RankedDoc parse(XContentParser parser) { + Builder builder = PARSER.apply(parser, null); + return builder.build(); + } - return new RankedDocsResults.RankedDoc(index, score, content); + private static final class Builder { + + private String content; + private Float score; + + private Builder() {} + + public Builder setScore(Float score) { + this.score = score; + return this; + } + + public Builder setContent(String content) { + this.content = content; + return this; + } + + public RankedDoc build() { + return new RankedDoc(score, content); + } + } } } From 95da99ee6d0a34cdb6e93f2257afe3c44aa62f52 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 2 Jul 2024 08:50:45 -0400 Subject: [PATCH 109/216] [ESQL] Use DataType instead of Strings in ColumnInfo (#110288) The ColumnInfo class, which is used for request and response serialization, stores a data type. Prior to this PR, it stored that type as a String, and there were several places (seen below) where we needed to switch behavior based on that string. Switching on strings is brittle, as there's no way for the compiler to enforce that all cases are covered, so we have to rely on a default -> throw pattern, and hope that a test catches that path. On the other hand, we can instead switch on the actual DataType enum value, which the compiler can then enforce checking all values. Eventually, it would make sense for most or all of these switches to become functions on DataType, but the visibility between esql.core and esql right now makes that difficult. 
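To make the compiler-enforcement point concrete, here is a minimal, self-contained Java sketch of the two dispatch styles; `ValueType`, `FormatterSketch`, `formatByName` and `formatByType` are invented stand-ins for illustration, not the real `DataType` or `ColumnInfo` members.

```java
// Hypothetical, trimmed-down enum standing in for DataType.
enum ValueType { INTEGER, KEYWORD, DOUBLE }

class FormatterSketch {

    // String-based dispatch: the compiler cannot tell whether every type is
    // handled, so unmatched values surface only at runtime via the default arm.
    static String formatByName(String type, Object value) {
        return switch (type) {
            case "integer", "double" -> value.toString();
            case "keyword" -> "\"" + value + "\"";
            default -> throw new IllegalArgumentException("unknown type: " + type);
        };
    }

    // Enum-based dispatch: a switch expression over an enum with no default
    // arm must cover every constant, so adding a constant to ValueType turns
    // a missed case into a compile error here instead of a runtime failure.
    static String formatByType(ValueType type, Object value) {
        return switch (type) {
            case INTEGER, DOUBLE -> value.toString();
            case KEYWORD -> "\"" + value + "\"";
        };
    }
}
```

The diff below then threads the actual `DataType` through the new `ColumnInfoImpl`, so call sites can move toward the second style.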
--- .../xpack/core/esql/action/ColumnInfo.java | 45 +--- .../test/esql/qa/action/CoreEsqlActionIT.java | 19 +- .../xpack/esql/CsvTestUtils.java | 3 +- .../esql/action/AsyncEsqlQueryActionIT.java | 9 +- .../xpack/esql/action/EnrichIT.java | 20 +- .../xpack/esql/action/EsqlActionIT.java | 196 ++++++++++-------- .../xpack/esql/action/TimeSeriesIT.java | 78 +++---- .../xpack/esql/action/ColumnInfoImpl.java | 105 ++++++++++ .../xpack/esql/action/EsqlQueryResponse.java | 16 +- .../xpack/esql/action/PositionToXContent.java | 32 +-- .../xpack/esql/action/ResponseValueUtils.java | 77 ++++--- .../esql/action/ResponseXContentUtils.java | 10 +- .../esql/plugin/TransportEsqlQueryAction.java | 6 +- .../elasticsearch/xpack/esql/CsvTests.java | 5 +- .../esql/action/EsqlQueryResponseTests.java | 82 ++++---- .../xpack/esql/formatter/TextFormatTests.java | 16 +- .../esql/formatter/TextFormatterTests.java | 26 +-- 17 files changed, 426 insertions(+), 319 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfoImpl.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/ColumnInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/ColumnInfo.java index b3248077397c2..0c86b8ae4b757 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/ColumnInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/ColumnInfo.java @@ -7,52 +7,23 @@ package org.elasticsearch.xpack.core.esql.action; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.InstantiatingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public record ColumnInfo(String name, String type) implements Writeable { - - private static final InstantiatingObjectParser PARSER; - static { - InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( - "esql/column_info", - true, - ColumnInfo.class - ); - parser.declareString(constructorArg(), new ParseField("name")); - parser.declareString(constructorArg(), new ParseField("type")); - PARSER = parser.build(); +public interface ColumnInfo extends Writeable { + /* + static ColumnInfo fromXContent(XContentParser parser) { + return ColumnInfoImpl.PARSER.apply(parser, null); } - public static ColumnInfo fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } + */ - public ColumnInfo(StreamInput in) throws IOException { - this(in.readString(), in.readString()); - } + XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException; - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(name); - out.writeString(type); - } + String name(); - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field("name", name); - builder.field("type", type); - builder.endObject(); - return builder; - } + String outputType(); } diff --git 
a/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java b/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java index 8728b605134ac..46fff385b5398 100644 --- a/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java +++ b/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java @@ -18,6 +18,9 @@ import org.elasticsearch.xpack.core.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.core.esql.action.EsqlQueryRequestBuilder; import org.elasticsearch.xpack.core.esql.action.EsqlQueryResponse; +import org.elasticsearch.xpack.core.esql.action.EsqlResponse; +import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.junit.Before; import java.util.ArrayList; @@ -52,11 +55,14 @@ protected Collection> nodePlugins() { public void testRowTypesAndValues() { var query = "row a = 1, b = \"x\", c = 1000000000000, d = 1.1"; var request = EsqlQueryRequestBuilder.newRequestBuilder(client()).query(query); - try (var queryResp = run(request)) { + try (EsqlQueryResponse queryResp = run(request)) { logger.info("response=" + queryResp); - var resp = queryResp.response(); + EsqlResponse resp = queryResp.response(); assertThat(resp.columns().stream().map(ColumnInfo::name).toList(), contains("a", "b", "c", "d")); - assertThat(resp.columns().stream().map(ColumnInfo::type).toList(), contains("integer", "keyword", "long", "double")); + assertThat( + resp.columns().stream().map(c -> ((ColumnInfoImpl) c).type()).toList(), + contains(DataType.INTEGER, DataType.KEYWORD, DataType.LONG, DataType.DOUBLE) + ); assertThat(getValuesList(resp.rows()), contains(List.of(1, "x", 1000000000000L, 1.1d))); } } @@ -68,7 +74,7 @@ public void testRowStatsProjectGroupByInt() { logger.info("response=" + queryResp); var resp = queryResp.response(); assertThat(resp.columns().stream().map(ColumnInfo::name).toList(), contains("a")); - assertThat(resp.columns().stream().map(ColumnInfo::type).toList(), contains("integer")); + assertThat(resp.columns().stream().map(c -> ((ColumnInfoImpl) c).type()).toList(), contains(DataType.INTEGER)); assertThat(getValuesList(resp.rows()), contains(List.of(1))); } } @@ -80,7 +86,10 @@ public void testFrom() { var resp = queryResp.response(); logger.info("response=" + queryResp); assertThat(resp.columns().stream().map(ColumnInfo::name).toList(), contains("item", "cost", "color", "sale")); - assertThat(resp.columns().stream().map(ColumnInfo::type).toList(), contains("long", "double", "keyword", "date")); + assertThat( + resp.columns().stream().map(c -> ((ColumnInfoImpl) c).type()).toList(), + contains(DataType.LONG, DataType.DOUBLE, DataType.KEYWORD, DataType.DATETIME) + ); // columnar values assertThat(columnValues(resp.column(0)), contains(1L, 2L, 3L, 4L)); assertThat(columnValues(resp.column(1)), contains(1.1d, 2.1d, 3.1d, 4.1d)); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index ad7c3fba1683e..d88d7f9b9448f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -28,6 +28,7 
@@ import org.elasticsearch.logging.Logger; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.esql.action.ResponseValueUtils; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.supercsv.io.CsvListReader; import org.supercsv.prefs.CsvPreference; @@ -537,7 +538,7 @@ public Comparator comparator() { record ActualResults( List columnNames, List columnTypes, - List dataTypes, + List dataTypes, List pages, Map> responseHeaders ) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java index 54c5e8511426a..da9aa96876fd7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.hamcrest.core.IsEqual; @@ -90,7 +89,7 @@ public void testBasicAsyncExecution() throws Exception { try (var finalResponse = future.get()) { assertThat(finalResponse, notNullValue()); assertThat(finalResponse.isRunning(), is(false)); - assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfoImpl("sum(pause_me)", "long")))); assertThat(getValuesList(finalResponse).size(), equalTo(1)); } @@ -99,7 +98,7 @@ public void testBasicAsyncExecution() throws Exception { try (var finalResponse = again.get()) { assertThat(finalResponse, notNullValue()); assertThat(finalResponse.isRunning(), is(false)); - assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfoImpl("sum(pause_me)", "long")))); assertThat(getValuesList(finalResponse).size(), equalTo(1)); } @@ -174,7 +173,7 @@ private void testFinishingBeforeTimeout(boolean keepOnCompletion) { try (var response = request.execute().actionGet(60, TimeUnit.SECONDS)) { assertThat(response.isRunning(), is(false)); - assertThat(response.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(response.columns(), equalTo(List.of(new ColumnInfoImpl("sum(pause_me)", "long")))); assertThat(getValuesList(response).size(), equalTo(1)); if (keepOnCompletion) { @@ -187,7 +186,7 @@ private void testFinishingBeforeTimeout(boolean keepOnCompletion) { try (var resp = future.actionGet(60, TimeUnit.SECONDS)) { assertThat(resp.asyncExecutionId().get(), equalTo(id)); assertThat(resp.isRunning(), is(false)); - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("sum(pause_me)", "long")))); assertThat(getValuesList(resp).size(), equalTo(1)); } } else { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java index 
5806cb8ef0982..5be816712cf20 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java @@ -40,9 +40,9 @@ import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.enrich.EnrichPlugin; import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.After; @@ -226,12 +226,12 @@ private static String enrichSongCommand() { public void testSumDurationByArtist() { Function> extractStats = resp -> { - List columns = resp.columns(); + List columns = resp.columns(); assertThat(columns, hasSize(2)); assertThat(columns.get(0).name(), equalTo("sum(duration)")); - assertThat(columns.get(0).type(), equalTo("double")); + assertThat(columns.get(0).type(), equalTo(DataType.DOUBLE)); assertThat(columns.get(1).name(), equalTo("artist")); - assertThat(columns.get(1).type(), equalTo("keyword")); + assertThat(columns.get(1).type(), equalTo(DataType.KEYWORD)); Iterator> rows = resp.values(); Map actualValues = new HashMap<>(); while (rows.hasNext()) { @@ -256,12 +256,12 @@ public void testSumDurationByArtist() { public void testAvgDurationByArtist() { Function> extractStats = resp -> { - List columns = resp.columns(); + List columns = resp.columns(); assertThat(columns, hasSize(2)); assertThat(columns.get(0).name(), equalTo("avg(duration)")); - assertThat(columns.get(0).type(), equalTo("double")); + assertThat(columns.get(0).type(), equalTo(DataType.DOUBLE)); assertThat(columns.get(1).name(), equalTo("artist")); - assertThat(columns.get(1).type(), equalTo("keyword")); + assertThat(columns.get(1).type(), equalTo(DataType.KEYWORD)); Iterator> rows = resp.values(); Map actualValues = new HashMap<>(); while (rows.hasNext()) { @@ -282,12 +282,12 @@ public void testAvgDurationByArtist() { public void testListeningRatio() { Function> extractStats = resp -> { - List columns = resp.columns(); + List columns = resp.columns(); assertThat(columns, hasSize(2)); assertThat(columns.get(0).name(), equalTo("ratio")); - assertThat(columns.get(0).type(), equalTo("double")); + assertThat(columns.get(0).type(), equalTo(DataType.DOUBLE)); assertThat(columns.get(1).name(), equalTo("artist")); - assertThat(columns.get(1).type(), equalTo("keyword")); + assertThat(columns.get(1).type(), equalTo(DataType.KEYWORD)); Iterator> rows = resp.values(); Map actualValues = new HashMap<>(); while (rows.hasNext()) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 686fb831aa042..0ec2f0da2d2a6 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.VerificationException; 
+import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; @@ -100,7 +101,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { public void testProjectConstant() { try (EsqlQueryResponse results = run("from test | eval x = 1 | keep x")) { - assertThat(results.columns(), equalTo(List.of(new ColumnInfo("x", "integer")))); + assertThat(results.columns(), equalTo(List.of(new ColumnInfoImpl("x", "integer")))); assertThat(getValuesList(results).size(), equalTo(40)); assertThat(getValuesList(results).get(0).get(0), equalTo(1)); } @@ -108,7 +109,7 @@ public void testProjectConstant() { public void testStatsOverConstant() { try (EsqlQueryResponse results = run("from test | eval x = 1 | stats x = count(x)")) { - assertThat(results.columns(), equalTo(List.of(new ColumnInfo("x", "long")))); + assertThat(results.columns(), equalTo(List.of(new ColumnInfoImpl("x", "long")))); assertThat(getValuesList(results).size(), equalTo(1)); assertThat(getValuesList(results).get(0).get(0), equalTo(40L)); } @@ -139,12 +140,12 @@ private void testFromStatsGroupingAvgImpl(String command, String expectedGroupNa assertEquals(2, results.columns().size()); // assert column metadata - ColumnInfo valuesColumn = results.columns().get(0); + ColumnInfoImpl valuesColumn = results.columns().get(0); assertEquals(expectedFieldName, valuesColumn.name()); - assertEquals("double", valuesColumn.type()); - ColumnInfo groupColumn = results.columns().get(1); + assertEquals(DataType.DOUBLE, valuesColumn.type()); + ColumnInfoImpl groupColumn = results.columns().get(1); assertEquals(expectedGroupName, groupColumn.name()); - assertEquals("long", groupColumn.type()); + assertEquals(DataType.LONG, groupColumn.type()); // assert column values List> valueValues = getValuesList(results); @@ -178,12 +179,12 @@ private void testFromStatsGroupingCountImpl(String command, String expectedField assertEquals(2, results.columns().size()); // assert column metadata - ColumnInfo groupColumn = results.columns().get(0); + ColumnInfoImpl groupColumn = results.columns().get(0); assertEquals(expectedGroupName, groupColumn.name()); - assertEquals("long", groupColumn.type()); - ColumnInfo valuesColumn = results.columns().get(1); + assertEquals(DataType.LONG, groupColumn.type()); + ColumnInfoImpl valuesColumn = results.columns().get(1); assertEquals(expectedFieldName, valuesColumn.name()); - assertEquals("long", valuesColumn.type()); + assertEquals(DataType.LONG, valuesColumn.type()); // assert column values List> valueValues = getValuesList(results); @@ -212,9 +213,9 @@ public void testFromStatsGroupingByDate() { // assert column metadata assertEquals("avg(count)", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); + assertEquals(DataType.DOUBLE, results.columns().get(0).type()); assertEquals("time", results.columns().get(1).name()); - assertEquals("long", results.columns().get(1).type()); + assertEquals(DataType.LONG, results.columns().get(1).type()); // assert column values List expectedValues = LongStream.range(0, 40).map(i -> epoch + i).sorted().boxed().toList(); @@ -244,9 +245,9 @@ public void testFromGroupingByNumericFieldWithNulls() { assertThat(results.columns(), hasSize(2)); assertEquals("avg(count)", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); + 
assertEquals(DataType.DOUBLE, results.columns().get(0).type()); assertEquals("data", results.columns().get(1).name()); - assertEquals("long", results.columns().get(1).type()); + assertEquals(DataType.LONG, results.columns().get(1).type()); record Group(Long data, Double avg) {} List expectedGroups = List.of(new Group(1L, 42.0), new Group(2L, 44.0), new Group(99L, null), new Group(null, 12.0)); @@ -263,9 +264,9 @@ public void testFromStatsGroupingByKeyword() { // assert column metadata assertEquals("avg(count)", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); + assertEquals(DataType.DOUBLE, results.columns().get(0).type()); assertEquals("color", results.columns().get(1).name()); - assertEquals("keyword", results.columns().get(1).type()); + assertEquals(DataType.KEYWORD, results.columns().get(1).type()); record Group(String color, double avg) { } @@ -298,9 +299,9 @@ public void testFromStatsGroupingByKeywordWithNulls() { // assert column metadata assertEquals("avg", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); + assertEquals(DataType.DOUBLE, results.columns().get(0).type()); assertEquals("color", results.columns().get(1).name()); - assertEquals("keyword", results.columns().get(1).type()); + assertEquals(DataType.KEYWORD, results.columns().get(1).type()); record Group(String color, Double avg) { } @@ -332,17 +333,17 @@ public void testFromStatsMultipleAggs() { // assert column metadata assertEquals("a", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); + assertEquals(DataType.DOUBLE, results.columns().get(0).type()); assertEquals("mi", results.columns().get(1).name()); - assertEquals("long", results.columns().get(1).type()); + assertEquals(DataType.LONG, results.columns().get(1).type()); assertEquals("ma", results.columns().get(2).name()); - assertEquals("long", results.columns().get(2).type()); + assertEquals(DataType.LONG, results.columns().get(2).type()); assertEquals("s", results.columns().get(3).name()); - assertEquals("long", results.columns().get(3).type()); + assertEquals(DataType.LONG, results.columns().get(3).type()); assertEquals("c", results.columns().get(4).name()); - assertEquals("long", results.columns().get(4).type()); + assertEquals(DataType.LONG, results.columns().get(4).type()); assertEquals("color", results.columns().get(5).name()); - assertEquals("keyword", results.columns().get(5).type()); + assertEquals(DataType.KEYWORD, results.columns().get(5).type()); record Group(double avg, long mi, long ma, long s, long c, String color) {} List expectedGroups = List.of( new Group(42, 42, 42, 420, 10, "blue"), @@ -380,7 +381,7 @@ public void testFromStatsProjectGroup() { try (EsqlQueryResponse results = run("from test | stats avg_count = avg(count) by data | keep data")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("data")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("long")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.LONG)); assertThat(getValuesList(results), containsInAnyOrder(List.of(1L), List.of(2L))); } } @@ -389,7 +390,7 @@ public void testRowStatsProjectGroupByInt() { try (EsqlQueryResponse results = run("row a = 1, b = 2 | stats count(b) by a | keep a")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("a")); - 
assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("integer")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.INTEGER)); assertThat(getValuesList(results), contains(List.of(1))); } } @@ -398,7 +399,7 @@ public void testRowStatsProjectGroupByLong() { try (EsqlQueryResponse results = run("row a = 1000000000000, b = 2 | stats count(b) by a | keep a")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("a")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("long")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.LONG)); assertThat(getValuesList(results), contains(List.of(1000000000000L))); } } @@ -407,7 +408,7 @@ public void testRowStatsProjectGroupByDouble() { try (EsqlQueryResponse results = run("row a = 1.0, b = 2 | stats count(b) by a | keep a")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("a")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("double")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.DOUBLE)); assertThat(getValuesList(results), contains(List.of(1.0))); } } @@ -416,7 +417,7 @@ public void testRowStatsProjectGroupByKeyword() { try (EsqlQueryResponse results = run("row a = \"hello\", b = 2 | stats count(b) by a | keep a")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("a")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("keyword")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.KEYWORD)); assertThat(getValuesList(results), contains(List.of("hello"))); } } @@ -425,7 +426,7 @@ public void testFromStatsProjectGroupByDouble() { try (EsqlQueryResponse results = run("from test | stats count(count) by data_d | keep data_d")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("data_d")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("double")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.DOUBLE)); assertThat(getValuesList(results), containsInAnyOrder(List.of(1.0), List.of(2.0))); } } @@ -435,7 +436,7 @@ public void testFromStatsProjectGroupWithAlias() { try (EsqlQueryResponse results = run(query)) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("d", "d2")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("long", "long")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.LONG, DataType.LONG)); assertThat(getValuesList(results), containsInAnyOrder(List.of(1L, 1L), List.of(2L, 2L))); } } @@ -444,7 +445,7 @@ public void testFromStatsProjectAgg() { try (EsqlQueryResponse results = run("from test | stats a = avg(count) by data | keep a")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("a")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("double")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.DOUBLE)); assertThat(getValuesList(results), containsInAnyOrder(List.of(42d), List.of(44d))); } } @@ -453,7 +454,7 @@ 
public void testFromStatsProjectAggWithAlias() { try (EsqlQueryResponse results = run("from test | stats a = avg(count) by data | rename a as b | keep b")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("b")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("double")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.DOUBLE)); assertThat(getValuesList(results), containsInAnyOrder(List.of(42d), List.of(44d))); } } @@ -462,7 +463,7 @@ public void testFromProjectStatsGroupByAlias() { try (EsqlQueryResponse results = run("from test | rename data as d | keep d, count | stats avg(count) by d")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("avg(count)", "d")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("double", "long")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.DOUBLE, DataType.LONG)); assertThat(getValuesList(results), containsInAnyOrder(List.of(42d, 1L), List.of(44d, 2L))); } } @@ -471,7 +472,7 @@ public void testFromProjectStatsAggregateAlias() { try (EsqlQueryResponse results = run("from test | rename count as c | keep c, data | stats avg(c) by data")) { logger.info(results); assertThat(results.columns().stream().map(ColumnInfo::name).toList(), contains("avg(c)", "data")); - assertThat(results.columns().stream().map(ColumnInfo::type).toList(), contains("double", "long")); + assertThat(results.columns().stream().map(ColumnInfoImpl::type).toList(), contains(DataType.DOUBLE, DataType.LONG)); assertThat(getValuesList(results), containsInAnyOrder(List.of(42d, 1L), List.of(44d, 2L))); } } @@ -482,7 +483,7 @@ public void testFromEvalStats() { assertEquals(1, results.columns().size()); assertEquals(1, getValuesList(results).size()); assertEquals("avg(ratio)", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); + assertEquals(DataType.DOUBLE, results.columns().get(0).type()); assertEquals(1, getValuesList(results).get(0).size()); assertEquals(0.034d, (double) getValuesList(results).get(0).get(0), 0.001d); } @@ -494,7 +495,7 @@ public void testUngroupedCountAll() { assertEquals(1, results.columns().size()); assertEquals(1, getValuesList(results).size()); assertEquals("count(*)", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + assertEquals(DataType.LONG, results.columns().get(0).type()); var values = getValuesList(results).get(0); assertEquals(1, values.size()); assertEquals(40, (long) values.get(0)); @@ -507,7 +508,7 @@ public void testUngroupedCountAllWithFilter() { assertEquals(1, results.columns().size()); assertEquals(1, getValuesList(results).size()); assertEquals("count(*)", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + assertEquals(DataType.LONG, results.columns().get(0).type()); var values = getValuesList(results).get(0); assertEquals(1, values.size()); assertEquals(20, (long) values.get(0)); @@ -520,9 +521,9 @@ public void testGroupedCountAllWithFilter() { assertEquals(2, results.columns().size()); assertEquals(1, getValuesList(results).size()); assertEquals("count(*)", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + assertEquals(DataType.LONG, results.columns().get(0).type()); assertEquals("data", results.columns().get(1).name()); - 
assertEquals("long", results.columns().get(1).type()); + assertEquals(DataType.LONG, results.columns().get(1).type()); var values = getValuesList(results).get(0); assertEquals(2, values.size()); assertEquals(20, (long) values.get(0)); @@ -536,10 +537,10 @@ public void testFromStatsEvalWithPragma() { logger.info(results); assertEquals(1, getValuesList(results).size()); assertEquals(2, getValuesList(results).get(0).size()); - assertEquals(50, (double) getValuesList(results).get(0).get(results.columns().indexOf(new ColumnInfo("x", "double"))), 1d); + assertEquals(50, (double) getValuesList(results).get(0).get(results.columns().indexOf(new ColumnInfoImpl("x", "double"))), 1d); assertEquals( 43, - (double) getValuesList(results).get(0).get(results.columns().indexOf(new ColumnInfo("avg_count", "double"))), + (double) getValuesList(results).get(0).get(results.columns().indexOf(new ColumnInfoImpl("avg_count", "double"))), 1d ); } @@ -549,7 +550,7 @@ public void testWhere() { try (EsqlQueryResponse results = run("from test | where count > 40")) { logger.info(results); assertEquals(30, getValuesList(results).size()); - var countIndex = results.columns().indexOf(new ColumnInfo("count", "long")); + var countIndex = results.columns().indexOf(new ColumnInfoImpl("count", "long")); for (List values : getValuesList(results)) { assertThat((Long) values.get(countIndex), greaterThan(40L)); } @@ -560,7 +561,7 @@ public void testProjectWhere() { try (EsqlQueryResponse results = run("from test | keep count | where count > 40")) { logger.info(results); assertEquals(30, getValuesList(results).size()); - int countIndex = results.columns().indexOf(new ColumnInfo("count", "long")); + int countIndex = results.columns().indexOf(new ColumnInfoImpl("count", "long")); for (List values : getValuesList(results)) { assertThat((Long) values.get(countIndex), greaterThan(40L)); } @@ -571,7 +572,7 @@ public void testEvalWhere() { try (EsqlQueryResponse results = run("from test | eval x = count / 2 | where x > 20")) { logger.info(results); assertEquals(30, getValuesList(results).size()); - int countIndex = results.columns().indexOf(new ColumnInfo("x", "long")); + int countIndex = results.columns().indexOf(new ColumnInfoImpl("x", "long")); for (List values : getValuesList(results)) { assertThat((Long) values.get(countIndex), greaterThan(20L)); } @@ -589,7 +590,7 @@ public void testStringLength() { try (EsqlQueryResponse results = run("from test | eval l = length(color)")) { logger.info(results); assertThat(getValuesList(results), hasSize(40)); - int countIndex = results.columns().indexOf(new ColumnInfo("l", "integer")); + int countIndex = results.columns().indexOf(new ColumnInfoImpl("l", "integer")); for (List values : getValuesList(results)) { assertThat((Integer) values.get(countIndex), greaterThanOrEqualTo(3)); } @@ -608,11 +609,11 @@ public void testFilterWithNullAndEvalFromIndex() { try (EsqlQueryResponse results = run("from test | eval newCount = count + 1 | where newCount > 1")) { logger.info(results); assertEquals(40, getValuesList(results).size()); - assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("count", "long")))); - assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("count_d", "double")))); - assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("data", "long")))); - assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("data_d", "double")))); - assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("time", "long")))); + assertThat(results.columns(), hasItem(equalTo(new 
ColumnInfoImpl("count", "long")))); + assertThat(results.columns(), hasItem(equalTo(new ColumnInfoImpl("count_d", "double")))); + assertThat(results.columns(), hasItem(equalTo(new ColumnInfoImpl("data", "long")))); + assertThat(results.columns(), hasItem(equalTo(new ColumnInfoImpl("data_d", "double")))); + assertThat(results.columns(), hasItem(equalTo(new ColumnInfoImpl("time", "long")))); } } @@ -646,7 +647,7 @@ public void testEvalOverride() { assertEquals(40, getValuesList(results).size()); assertEquals(1, results.columns().stream().filter(c -> c.name().equals("count")).count()); int countIndex = results.columns().size() - 1; - assertEquals(new ColumnInfo("count", "long"), results.columns().get(countIndex)); + assertEquals(new ColumnInfoImpl("count", "long"), results.columns().get(countIndex)); for (List values : getValuesList(results)) { assertThat((Long) values.get(countIndex), greaterThanOrEqualTo(42L)); } @@ -657,7 +658,7 @@ public void testProjectRename() { try (var results = run("from test | eval y = count | rename count as x | keep x, y")) { logger.info(results); assertEquals(40, getValuesList(results).size()); - assertThat(results.columns(), contains(new ColumnInfo("x", "long"), new ColumnInfo("y", "long"))); + assertThat(results.columns(), contains(new ColumnInfoImpl("x", "long"), new ColumnInfoImpl("y", "long"))); for (List values : getValuesList(results)) { assertThat((Long) values.get(0), greaterThanOrEqualTo(40L)); assertThat(values.get(1), is(values.get(0))); @@ -672,10 +673,10 @@ public void testProjectRenameEval() { assertThat( results.columns(), contains( - new ColumnInfo("x", "long"), - new ColumnInfo("y", "long"), - new ColumnInfo("x2", "long"), - new ColumnInfo("y2", "long") + new ColumnInfoImpl("x", "long"), + new ColumnInfoImpl("y", "long"), + new ColumnInfoImpl("x2", "long"), + new ColumnInfoImpl("y2", "long") ) ); for (List values : getValuesList(results)) { @@ -691,7 +692,10 @@ public void testProjectRenameEvalProject() { try (var results = run("from test | eval y = count | rename count as x | keep x, y | eval z = x + y | keep x, y, z")) { logger.info(results); assertEquals(40, getValuesList(results).size()); - assertThat(results.columns(), contains(new ColumnInfo("x", "long"), new ColumnInfo("y", "long"), new ColumnInfo("z", "long"))); + assertThat( + results.columns(), + contains(new ColumnInfoImpl("x", "long"), new ColumnInfoImpl("y", "long"), new ColumnInfoImpl("z", "long")) + ); for (List values : getValuesList(results)) { assertThat((Long) values.get(0), greaterThanOrEqualTo(40L)); assertThat(values.get(1), is(values.get(0))); @@ -704,7 +708,7 @@ public void testProjectOverride() { try (var results = run("from test | eval cnt = count | rename count as data | keep cnt, data")) { logger.info(results); assertEquals(40, getValuesList(results).size()); - assertThat(results.columns(), contains(new ColumnInfo("cnt", "long"), new ColumnInfo("data", "long"))); + assertThat(results.columns(), contains(new ColumnInfoImpl("cnt", "long"), new ColumnInfoImpl("data", "long"))); for (List values : getValuesList(results)) { assertThat(values.get(1), is(values.get(0))); } @@ -865,7 +869,7 @@ public void testEvalWithNullAndAvg() { assertEquals(1, results.columns().size()); assertEquals(1, getValuesList(results).size()); assertEquals("avg(nullsum)", results.columns().get(0).name()); - assertEquals("double", results.columns().get(0).type()); + assertEquals(DataType.DOUBLE, results.columns().get(0).type()); assertEquals(1, getValuesList(results).get(0).size()); 
assertNull(getValuesList(results).get(0).get(0)); } @@ -874,7 +878,7 @@ public void testEvalWithNullAndAvg() { public void testFromStatsLimit() { try (EsqlQueryResponse results = run("from test | stats ac = avg(count) by data | limit 1")) { logger.info(results); - assertThat(results.columns(), contains(new ColumnInfo("ac", "double"), new ColumnInfo("data", "long"))); + assertThat(results.columns(), contains(new ColumnInfoImpl("ac", "double"), new ColumnInfoImpl("data", "long"))); assertThat(getValuesList(results), contains(anyOf(contains(42.0, 1L), contains(44.0, 2L)))); } } @@ -882,7 +886,7 @@ public void testFromStatsLimit() { public void testFromLimit() { try (EsqlQueryResponse results = run("from test | keep data | limit 2")) { logger.info(results); - assertThat(results.columns(), contains(new ColumnInfo("data", "long"))); + assertThat(results.columns(), contains(new ColumnInfoImpl("data", "long"))); assertThat(getValuesList(results), contains(anyOf(contains(1L), contains(2L)), anyOf(contains(1L), contains(2L)))); } } @@ -891,7 +895,7 @@ public void testDropAllColumns() { try (EsqlQueryResponse results = run("from test | keep data | drop data | eval a = 1")) { logger.info(results); assertThat(results.columns(), hasSize(1)); - assertThat(results.columns(), contains(new ColumnInfo("a", "integer"))); + assertThat(results.columns(), contains(new ColumnInfoImpl("a", "integer"))); assertThat(getValuesList(results), is(empty())); } } @@ -1010,7 +1014,7 @@ public void testErrorMessageForEmptyParams() { public void testEmptyIndex() { assertAcked(client().admin().indices().prepareCreate("test_empty").setMapping("k", "type=keyword", "v", "type=long").get()); try (EsqlQueryResponse results = run("from test_empty")) { - assertThat(results.columns(), equalTo(List.of(new ColumnInfo("k", "keyword"), new ColumnInfo("v", "long")))); + assertThat(results.columns(), equalTo(List.of(new ColumnInfoImpl("k", "keyword"), new ColumnInfoImpl("v", "long")))); assertThat(getValuesList(results), empty()); } } @@ -1019,7 +1023,13 @@ public void testShowInfo() { try (EsqlQueryResponse results = run("show info")) { assertThat( results.columns(), - equalTo(List.of(new ColumnInfo("version", "keyword"), new ColumnInfo("date", "keyword"), new ColumnInfo("hash", "keyword"))) + equalTo( + List.of( + new ColumnInfoImpl("version", "keyword"), + new ColumnInfoImpl("date", "keyword"), + new ColumnInfoImpl("hash", "keyword") + ) + ) ); assertThat(getValuesList(results).size(), equalTo(1)); assertThat(getValuesList(results).get(0).get(0), equalTo(Build.current().version())); @@ -1034,16 +1044,16 @@ public void testMetaFunctions() { results.columns(), equalTo( List.of( - new ColumnInfo("name", "keyword"), - new ColumnInfo("synopsis", "keyword"), - new ColumnInfo("argNames", "keyword"), - new ColumnInfo("argTypes", "keyword"), - new ColumnInfo("argDescriptions", "keyword"), - new ColumnInfo("returnType", "keyword"), - new ColumnInfo("description", "keyword"), - new ColumnInfo("optionalArgs", "boolean"), - new ColumnInfo("variadic", "boolean"), - new ColumnInfo("isAggregation", "boolean") + new ColumnInfoImpl("name", "keyword"), + new ColumnInfoImpl("synopsis", "keyword"), + new ColumnInfoImpl("argNames", "keyword"), + new ColumnInfoImpl("argTypes", "keyword"), + new ColumnInfoImpl("argDescriptions", "keyword"), + new ColumnInfoImpl("returnType", "keyword"), + new ColumnInfoImpl("description", "keyword"), + new ColumnInfoImpl("optionalArgs", "boolean"), + new ColumnInfoImpl("variadic", "boolean"), + new 
ColumnInfoImpl("isAggregation", "boolean") ) ) ); @@ -1053,7 +1063,7 @@ public void testMetaFunctions() { public void testInWithNullValue() { try (EsqlQueryResponse results = run("from test | where null in (data, 2) | keep data")) { - assertThat(results.columns(), equalTo(List.of(new ColumnInfo("data", "long")))); + assertThat(results.columns(), equalTo(List.of(new ColumnInfoImpl("data", "long")))); assertThat(getValuesList(results).size(), equalTo(0)); } } @@ -1088,11 +1098,11 @@ public void testTopNPushedToLucene() { // assert column metadata assertEquals("data", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + assertEquals(DataType.LONG, results.columns().get(0).type()); assertEquals("count", results.columns().get(1).name()); - assertEquals("long", results.columns().get(1).type()); + assertEquals(DataType.LONG, results.columns().get(1).type()); assertEquals("color", results.columns().get(2).name()); - assertEquals("keyword", results.columns().get(2).type()); + assertEquals(DataType.KEYWORD, results.columns().get(2).type()); record Group(Long data, Long count, String color) { Group(Long data, Long count) { this(data, count, "yellow"); @@ -1139,7 +1149,7 @@ public void testTopNPushedToLuceneOnSortedIndex() { // assert column metadata assertEquals("time", results.columns().get(0).name()); - assertEquals("long", results.columns().get(0).type()); + assertEquals(DataType.LONG, results.columns().get(0).type()); boolean sortedDesc = "desc".equals(sortOrder); var expected = LongStream.range(0, 40) @@ -1214,7 +1224,7 @@ public void testGroupingMultiValueByOrdinals() { public void testLoadId() { try (EsqlQueryResponse results = run("from test metadata _id | keep _id | sort _id ")) { - assertThat(results.columns(), equalTo(List.of(new ColumnInfo("_id", "keyword")))); + assertThat(results.columns(), equalTo(List.of(new ColumnInfoImpl("_id", "keyword")))); ListMatcher values = matchesList(); for (int i = 10; i < 50; i++) { values = values.item(List.of(Integer.toString(i))); @@ -1420,12 +1430,12 @@ public void testQueryOnEmptyMappingIndex() { try (EsqlQueryResponse resp = run(from + "METADATA _source | EVAL x = 123")) { assertFalse(resp.values().hasNext()); - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("_source", "_source"), new ColumnInfo("x", "integer")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("_source", "_source"), new ColumnInfoImpl("x", "integer")))); } try (EsqlQueryResponse resp = run(from)) { assertFalse(resp.values().hasNext()); - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("", "null")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("", "null")))); } } @@ -1450,32 +1460,38 @@ public void testQueryOnEmptyDataIndex() { assertFalse(resp.values().hasNext()); assertThat( resp.columns(), - equalTo(List.of(new ColumnInfo("name", "text"), new ColumnInfo("_source", "_source"), new ColumnInfo("x", "integer"))) + equalTo( + List.of( + new ColumnInfoImpl("name", "text"), + new ColumnInfoImpl("_source", "_source"), + new ColumnInfoImpl("x", "integer") + ) + ) ); } try (EsqlQueryResponse resp = run(from)) { assertFalse(resp.values().hasNext()); - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("name", "text")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("name", "text")))); } } private void assertEmptyIndexQueries(String from) { try (EsqlQueryResponse resp = run(from + "METADATA _source | KEEP _source | LIMIT 1")) { assertFalse(resp.values().hasNext()); - 
assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("_source", "_source")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("_source", "_source")))); } try (EsqlQueryResponse resp = run(from + "| EVAL y = 1 | KEEP y | LIMIT 1 | EVAL x = 1")) { assertFalse(resp.values().hasNext()); - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("y", "integer"), new ColumnInfo("x", "integer")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("y", "integer"), new ColumnInfoImpl("x", "integer")))); } try (EsqlQueryResponse resp = run(from + "| STATS c = count()")) { assertTrue(resp.values().hasNext()); Iterator row = resp.values().next(); assertThat(row.next(), equalTo((long) 0)); - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("c", "long")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("c", "long")))); } try (EsqlQueryResponse resp = run(from + "| STATS c = count() | EVAL x = 123")) { @@ -1484,7 +1500,7 @@ private void assertEmptyIndexQueries(String from) { assertThat(row.next(), equalTo((long) 0)); assertThat(row.next(), equalTo(123)); assertFalse(row.hasNext()); - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("c", "long"), new ColumnInfo("x", "integer")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("c", "long"), new ColumnInfoImpl("x", "integer")))); } } @@ -1561,7 +1577,7 @@ private void createAlias(List indices, String alias) throws InterruptedE private void assertNoNestedDocuments(String query, int docsCount, long minValue, long maxValue) { try (EsqlQueryResponse results = run(query)) { - assertThat(results.columns(), contains(new ColumnInfo("data", "long"))); + assertThat(results.columns(), contains(new ColumnInfoImpl("data", "long"))); assertThat(results.columns().size(), is(1)); assertThat(getValuesList(results).size(), is(docsCount)); for (List row : getValuesList(results)) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index 2ee6ef57e6571..02cecc63dbd0f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.Rounding; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.junit.Before; @@ -196,7 +195,7 @@ record RateKey(String cluster, String host) { } } try (var resp = run("METRICS hosts sum(rate(request_count, 1second))")) { - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("sum(rate(request_count, 1second))", "double")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("sum(rate(request_count, 1second))", "double")))); List> values = EsqlTestUtils.getValuesList(resp); assertThat(values, hasSize(1)); assertThat(values.get(0), hasSize(1)); @@ -205,7 +204,12 @@ record RateKey(String cluster, String host) { try (var resp = run("METRICS hosts max(rate(request_count)), min(rate(request_count))")) { assertThat( resp.columns(), - equalTo(List.of(new ColumnInfo("max(rate(request_count))", "double"), new ColumnInfo("min(rate(request_count))", "double"))) + equalTo( + List.of( + new 
ColumnInfoImpl("max(rate(request_count))", "double"), + new ColumnInfoImpl("min(rate(request_count))", "double") + ) + ) ); List> values = EsqlTestUtils.getValuesList(resp); assertThat(values, hasSize(1)); @@ -218,9 +222,9 @@ record RateKey(String cluster, String host) { resp.columns(), equalTo( List.of( - new ColumnInfo("max(rate(request_count))", "double"), - new ColumnInfo("avg(rate(request_count))", "double"), - new ColumnInfo("max(rate(request_count, 1minute))", "double") + new ColumnInfoImpl("max(rate(request_count))", "double"), + new ColumnInfoImpl("avg(rate(request_count))", "double"), + new ColumnInfoImpl("max(rate(request_count, 1minute))", "double") ) ) ); @@ -237,8 +241,8 @@ record RateKey(String cluster, String host) { resp.columns(), equalTo( List.of( - new ColumnInfo("avg(rate(request_count))", "double"), - new ColumnInfo("avg(rate(request_count, 1second))", "double") + new ColumnInfoImpl("avg(rate(request_count))", "double"), + new ColumnInfoImpl("avg(rate(request_count, 1second))", "double") ) ) ); @@ -292,7 +296,7 @@ record RateKey(String cluster, String host) { try (var resp = run("METRICS hosts sum(rate(request_count)) BY cluster | SORT cluster")) { assertThat( resp.columns(), - equalTo(List.of(new ColumnInfo("sum(rate(request_count))", "double"), new ColumnInfo("cluster", "keyword"))) + equalTo(List.of(new ColumnInfoImpl("sum(rate(request_count))", "double"), new ColumnInfoImpl("cluster", "keyword"))) ); List> values = EsqlTestUtils.getValuesList(resp); assertThat(values, hasSize(bucketToRates.size())); @@ -307,7 +311,7 @@ record RateKey(String cluster, String host) { try (var resp = run("METRICS hosts avg(rate(request_count)) BY cluster | SORT cluster")) { assertThat( resp.columns(), - equalTo(List.of(new ColumnInfo("avg(rate(request_count))", "double"), new ColumnInfo("cluster", "keyword"))) + equalTo(List.of(new ColumnInfoImpl("avg(rate(request_count))", "double"), new ColumnInfoImpl("cluster", "keyword"))) ); List> values = EsqlTestUtils.getValuesList(resp); assertThat(values, hasSize(bucketToRates.size())); @@ -330,9 +334,9 @@ record RateKey(String cluster, String host) { resp.columns(), equalTo( List.of( - new ColumnInfo("avg(rate(request_count, 1minute))", "double"), - new ColumnInfo("avg(rate(request_count))", "double"), - new ColumnInfo("cluster", "keyword") + new ColumnInfoImpl("avg(rate(request_count, 1minute))", "double"), + new ColumnInfoImpl("avg(rate(request_count))", "double"), + new ColumnInfoImpl("cluster", "keyword") ) ) ); @@ -376,7 +380,7 @@ record RateKey(String host, String cluster, long interval) {} try (var resp = run("METRICS hosts sum(rate(request_count)) BY ts=bucket(@timestamp, 1 minute) | SORT ts | LIMIT 5")) { assertThat( resp.columns(), - equalTo(List.of(new ColumnInfo("sum(rate(request_count))", "double"), new ColumnInfo("ts", "date"))) + equalTo(List.of(new ColumnInfoImpl("sum(rate(request_count))", "double"), new ColumnInfoImpl("ts", "date"))) ); List> values = EsqlTestUtils.getValuesList(resp); assertThat(values, hasSize(sortedKeys.size())); @@ -396,7 +400,7 @@ record RateKey(String host, String cluster, long interval) {} try (var resp = run("METRICS hosts avg(rate(request_count)) BY ts=bucket(@timestamp, 1minute) | SORT ts | LIMIT 5")) { assertThat( resp.columns(), - equalTo(List.of(new ColumnInfo("avg(rate(request_count))", "double"), new ColumnInfo("ts", "date"))) + equalTo(List.of(new ColumnInfoImpl("avg(rate(request_count))", "double"), new ColumnInfoImpl("ts", "date"))) ); List> values = EsqlTestUtils.getValuesList(resp); 
assertThat(values, hasSize(sortedKeys.size())); @@ -423,9 +427,9 @@ METRICS hosts avg(rate(request_count, 1minute)), avg(rate(request_count)) BY ts= resp.columns(), equalTo( List.of( - new ColumnInfo("avg(rate(request_count, 1minute))", "double"), - new ColumnInfo("avg(rate(request_count))", "double"), - new ColumnInfo("ts", "date") + new ColumnInfoImpl("avg(rate(request_count, 1minute))", "double"), + new ColumnInfoImpl("avg(rate(request_count))", "double"), + new ColumnInfoImpl("ts", "date") ) ) ); @@ -485,9 +489,9 @@ METRICS hosts sum(rate(request_count)) BY ts=bucket(@timestamp, 1 minute), clust resp.columns(), equalTo( List.of( - new ColumnInfo("sum(rate(request_count))", "double"), - new ColumnInfo("ts", "date"), - new ColumnInfo("cluster", "keyword") + new ColumnInfoImpl("sum(rate(request_count))", "double"), + new ColumnInfoImpl("ts", "date"), + new ColumnInfoImpl("cluster", "keyword") ) ) ); @@ -515,9 +519,9 @@ METRICS hosts avg(rate(request_count)) BY ts=bucket(@timestamp, 1minute), cluste resp.columns(), equalTo( List.of( - new ColumnInfo("avg(rate(request_count))", "double"), - new ColumnInfo("ts", "date"), - new ColumnInfo("cluster", "keyword") + new ColumnInfoImpl("avg(rate(request_count))", "double"), + new ColumnInfoImpl("ts", "date"), + new ColumnInfoImpl("cluster", "keyword") ) ) ); @@ -546,10 +550,10 @@ METRICS hosts avg(rate(request_count, 1minute)), avg(rate(request_count)) BY ts= resp.columns(), equalTo( List.of( - new ColumnInfo("avg(rate(request_count, 1minute))", "double"), - new ColumnInfo("avg(rate(request_count))", "double"), - new ColumnInfo("ts", "date"), - new ColumnInfo("cluster", "keyword") + new ColumnInfoImpl("avg(rate(request_count, 1minute))", "double"), + new ColumnInfoImpl("avg(rate(request_count))", "double"), + new ColumnInfoImpl("ts", "date"), + new ColumnInfoImpl("cluster", "keyword") ) ) ); @@ -588,11 +592,11 @@ METRICS hosts avg(rate(request_count, 1minute)), avg(rate(request_count)) BY ts= resp.columns(), equalTo( List.of( - new ColumnInfo("avg_rate", "double"), - new ColumnInfo("max(rate(request_count))", "double"), - new ColumnInfo("avg(rate(request_count))", "double"), - new ColumnInfo("ts", "date"), - new ColumnInfo("cluster", "keyword") + new ColumnInfoImpl("avg_rate", "double"), + new ColumnInfoImpl("max(rate(request_count))", "double"), + new ColumnInfoImpl("avg(rate(request_count))", "double"), + new ColumnInfoImpl("ts", "date"), + new ColumnInfoImpl("cluster", "keyword") ) ) ); @@ -711,14 +715,14 @@ record RateKey(String cluster, String host) { } } try (var resp = run("METRICS hosts sum(abs(rate(request_count, 1second)))")) { - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("sum(abs(rate(request_count, 1second)))", "double")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("sum(abs(rate(request_count, 1second)))", "double")))); List> values = EsqlTestUtils.getValuesList(resp); assertThat(values, hasSize(1)); assertThat(values.get(0), hasSize(1)); assertThat((double) values.get(0).get(0), closeTo(rates.stream().mapToDouble(d -> d).sum(), 0.1)); } try (var resp = run("METRICS hosts sum(10.0 * rate(request_count, 1second))")) { - assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("sum(10.0 * rate(request_count, 1second))", "double")))); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfoImpl("sum(10.0 * rate(request_count, 1second))", "double")))); List> values = EsqlTestUtils.getValuesList(resp); assertThat(values, hasSize(1)); assertThat(values.get(0), hasSize(1)); @@ -728,7 +732,9 @@ record 
RateKey(String cluster, String host) { assertThat( resp.columns(), equalTo( - List.of(new ColumnInfo("sum(20 * rate(request_count, 1second) + 10 * floor(rate(request_count, 1second)))", "double")) + List.of( + new ColumnInfoImpl("sum(20 * rate(request_count, 1second) + 10 * floor(rate(request_count, 1second)))", "double") + ) ) ); List> values = EsqlTestUtils.getValuesList(resp); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfoImpl.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfoImpl.java new file mode 100644 index 0000000000000..94da383b40957 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfoImpl.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.InstantiatingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ParserConstructor; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.esql.action.ColumnInfo; +import org.elasticsearch.xpack.esql.core.type.DataType; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class ColumnInfoImpl implements ColumnInfo { + + public static final InstantiatingObjectParser PARSER; + static { + InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( + "esql/column_info", + true, + ColumnInfoImpl.class + ); + parser.declareString(constructorArg(), new ParseField("name")); + parser.declareString(constructorArg(), new ParseField("type")); + PARSER = parser.build(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if ((o instanceof ColumnInfoImpl that)) { + return Objects.equals(name, that.name) && Objects.equals(type, that.type); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(name, type); + } + + public static ColumnInfo fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + private String name; + private DataType type; + + @ParserConstructor + public ColumnInfoImpl(String name, String type) { + this(name, DataType.fromEs(type)); + } + + public ColumnInfoImpl(String name, DataType type) { + this.name = name; + this.type = type; + } + + public ColumnInfoImpl(StreamInput in) throws IOException { + this(in.readString(), in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeString(type.outputType()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field("name", name); + builder.field("type", type.outputType()); + builder.endObject(); + return builder; + } + + @Override + public String name() { + return name; + } + + @Override + public String outputType() { + return type.outputType(); + } + + public DataType type() { + 
return type; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index fdf39545a396b..81fbda2ad6fee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -25,8 +25,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.core.esql.action.EsqlResponse; +import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; import java.util.Collections; @@ -45,7 +45,7 @@ public class EsqlQueryResponse extends org.elasticsearch.xpack.core.esql.action. public static final String DROP_NULL_COLUMNS_OPTION = "drop_null_columns"; - private final List columns; + private final List columns; private final List pages; private final Profile profile; private final boolean columnar; @@ -55,7 +55,7 @@ public class EsqlQueryResponse extends org.elasticsearch.xpack.core.esql.action. private final boolean isAsync; public EsqlQueryResponse( - List columns, + List columns, List pages, @Nullable Profile profile, boolean columnar, @@ -72,7 +72,7 @@ public EsqlQueryResponse( this.isAsync = isAsync; } - public EsqlQueryResponse(List columns, List pages, @Nullable Profile profile, boolean columnar, boolean isAsync) { + public EsqlQueryResponse(List columns, List pages, @Nullable Profile profile, boolean columnar, boolean isAsync) { this(columns, pages, profile, columnar, null, false, isAsync); } @@ -97,7 +97,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { isRunning = in.readBoolean(); isAsync = in.readBoolean(); } - List columns = in.readCollectionAsList(ColumnInfo::new); + List columns = in.readCollectionAsList(ColumnInfoImpl::new); List pages = in.readCollectionAsList(Page::new); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { profile = in.readOptionalWriteable(Profile::new); @@ -121,7 +121,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(columnar); } - public List columns() { + public List columns() { return columns; } @@ -130,12 +130,12 @@ List pages() { } public Iterator> values() { - List dataTypes = columns.stream().map(ColumnInfo::type).toList(); + List dataTypes = columns.stream().map(ColumnInfoImpl::type).toList(); return ResponseValueUtils.pagesToValues(dataTypes, pages); } public Iterable> rows() { - List dataTypes = columns.stream().map(ColumnInfo::type).toList(); + List dataTypes = columns.stream().map(ColumnInfoImpl::type).toList(); return ResponseValueUtils.valuesForRowsInPages(dataTypes, pages); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 1d07ccc276949..2cdbd9f5f93f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -22,7 +22,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import 
org.elasticsearch.xpack.core.esql.action.ColumnInfo; import java.io.IOException; @@ -59,30 +58,30 @@ public XContentBuilder positionToXContent(XContentBuilder builder, ToXContent.Pa protected abstract XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException; - public static PositionToXContent positionToXContent(ColumnInfo columnInfo, Block block, BytesRef scratch) { + public static PositionToXContent positionToXContent(ColumnInfoImpl columnInfo, Block block, BytesRef scratch) { return switch (columnInfo.type()) { - case "long", "counter_long" -> new PositionToXContent(block) { + case LONG, COUNTER_LONG -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return builder.value(((LongBlock) block).getLong(valueIndex)); } }; - case "integer", "counter_integer" -> new PositionToXContent(block) { + case INTEGER, COUNTER_INTEGER -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return builder.value(((IntBlock) block).getInt(valueIndex)); } }; - case "double", "counter_double" -> new PositionToXContent(block) { + case DOUBLE, COUNTER_DOUBLE -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return builder.value(((DoubleBlock) block).getDouble(valueIndex)); } }; - case "unsigned_long" -> new PositionToXContent(block) { + case UNSIGNED_LONG -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { @@ -90,7 +89,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(unsignedLongAsNumber(l)); } }; - case "keyword", "text" -> new PositionToXContent(block) { + case KEYWORD, TEXT -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { @@ -103,7 +102,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.utf8Value(val.bytes, val.offset, val.length); } }; - case "ip" -> new PositionToXContent(block) { + case IP -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { @@ -111,7 +110,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(ipToString(val)); } }; - case "date" -> new PositionToXContent(block) { + case DATETIME -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { @@ -119,21 +118,21 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(dateTimeToString(longVal)); } }; - case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> new PositionToXContent(block) { + case GEO_POINT, GEO_SHAPE, CARTESIAN_POINT, CARTESIAN_SHAPE -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return 
builder.value(spatialToString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; - case "boolean" -> new PositionToXContent(block) { + case BOOLEAN -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return builder.value(((BooleanBlock) block).getBoolean(valueIndex)); } }; - case "version" -> new PositionToXContent(block) { + case VERSION -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { @@ -141,21 +140,21 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(versionToString(val)); } }; - case "null" -> new PositionToXContent(block) { + case NULL -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return builder.nullValue(); } }; - case "unsupported" -> new PositionToXContent(block) { + case UNSUPPORTED -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return builder.value(UnsupportedValueSource.UNSUPPORTED_OUTPUT); } }; - case "_source" -> new PositionToXContent(block) { + case SOURCE -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { @@ -166,7 +165,8 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa } } }; - default -> throw new IllegalArgumentException("can't convert values of type [" + columnInfo.type() + "]"); + case DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, TSID_DATA_TYPE, SHORT, BYTE, OBJECT, NESTED, FLOAT, HALF_FLOAT, SCALED_FLOAT -> + throw new IllegalArgumentException("can't convert values of type [" + columnInfo.type() + "]"); }; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 70ec7504ed3d2..d99da4500a3b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -25,7 +25,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -57,7 +56,7 @@ public final class ResponseValueUtils { * Returns an iterator of iterators over the values in the given pages. There is one iterator * for each block. */ - public static Iterator> pagesToValues(List dataTypes, List pages) { + public static Iterator> pagesToValues(List dataTypes, List pages) { BytesRef scratch = new BytesRef(); return Iterators.flatMap( pages.iterator(), @@ -70,18 +69,18 @@ public static Iterator> pagesToValues(List dataTypes, L } /** Returns an iterable of iterables over the values in the given pages. There is one iterables for each row. 
*/ - static Iterable> valuesForRowsInPages(List dataTypes, List pages) { + static Iterable> valuesForRowsInPages(List dataTypes, List pages) { BytesRef scratch = new BytesRef(); return () -> Iterators.flatMap(pages.iterator(), page -> valuesForRowsInPage(dataTypes, page, scratch)); } /** Returns an iterable of iterables over the values in the given page. There is one iterables for each row. */ - static Iterator> valuesForRowsInPage(List dataTypes, Page page, BytesRef scratch) { + static Iterator> valuesForRowsInPage(List dataTypes, Page page, BytesRef scratch) { return Iterators.forRange(0, page.getPositionCount(), position -> valuesForRow(dataTypes, page, position, scratch)); } /** Returns an iterable over the values in the given row in a page. */ - static Iterable valuesForRow(List dataTypes, Page page, int position, BytesRef scratch) { + static Iterable valuesForRow(List dataTypes, Page page, int position, BytesRef scratch) { return () -> Iterators.forRange( 0, page.getBlockCount(), @@ -90,7 +89,7 @@ static Iterable valuesForRow(List dataTypes, Page page, int posi } /** Returns an iterator of values for the given column. */ - static Iterator valuesForColumn(int columnIndex, String dataType, List pages) { + static Iterator valuesForColumn(int columnIndex, DataType dataType, List pages) { BytesRef scratch = new BytesRef(); return Iterators.flatMap( pages.iterator(), @@ -103,7 +102,7 @@ static Iterator valuesForColumn(int columnIndex, String dataType, List

    unsignedLongAsNumber(((LongBlock) block).getLong(offset)); - case "long", "counter_long" -> ((LongBlock) block).getLong(offset); - case "integer", "counter_integer" -> ((IntBlock) block).getInt(offset); - case "double", "counter_double" -> ((DoubleBlock) block).getDouble(offset); - case "keyword", "text" -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); - case "ip" -> { + case UNSIGNED_LONG -> unsignedLongAsNumber(((LongBlock) block).getLong(offset)); + case LONG, COUNTER_LONG -> ((LongBlock) block).getLong(offset); + case INTEGER, COUNTER_INTEGER -> ((IntBlock) block).getInt(offset); + case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock) block).getDouble(offset); + case KEYWORD, TEXT -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); + case IP -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); yield ipToString(val); } - case "date" -> { + case DATETIME -> { long longVal = ((LongBlock) block).getLong(offset); yield dateTimeToString(longVal); } - case "boolean" -> ((BooleanBlock) block).getBoolean(offset); - case "version" -> versionToString(((BytesRefBlock) block).getBytesRef(offset, scratch)); - case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> spatialToString( + case BOOLEAN -> ((BooleanBlock) block).getBoolean(offset); + case VERSION -> versionToString(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case GEO_POINT, GEO_SHAPE, CARTESIAN_POINT, CARTESIAN_SHAPE -> spatialToString( ((BytesRefBlock) block).getBytesRef(offset, scratch) ); - case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; - case "_source" -> { + case UNSUPPORTED -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; + case SOURCE -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); try { try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(val))) { @@ -152,7 +151,8 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef throw new UncheckedIOException(e); } } - default -> throw EsqlIllegalArgumentException.illegalDataType(dataType); + case SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, NESTED, DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, TSID_DATA_TYPE, + NULL -> throw EsqlIllegalArgumentException.illegalDataType(dataType); }; } @@ -160,10 +160,10 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef * Converts a list of values to Pages so that we can parse from xcontent. It's not * super efficient, but it doesn't really have to be. 
*/ - static Page valuesToPage(BlockFactory blockFactory, List columns, List> values) { - List dataTypes = columns.stream().map(ColumnInfo::type).toList(); + static Page valuesToPage(BlockFactory blockFactory, List columns, List> values) { + List dataTypes = columns.stream().map(ColumnInfoImpl::type).toList(); List results = dataTypes.stream() - .map(c -> PlannerUtils.toElementType(DataType.fromEs(c)).newBlockBuilder(values.size(), blockFactory)) + .map(c -> PlannerUtils.toElementType(c).newBlockBuilder(values.size(), blockFactory)) .toList(); for (List row : values) { @@ -171,24 +171,20 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li var builder = results.get(c); var value = row.get(c); switch (dataTypes.get(c)) { - case "unsigned_long" -> ((LongBlock.Builder) builder).appendLong( - longToUnsignedLong(((Number) value).longValue(), true) - ); - case "long", "counter_long" -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue()); - case "integer", "counter_integer" -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue()); - case "double", "counter_double" -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue()); - case "keyword", "text", "unsupported" -> ((BytesRefBlock.Builder) builder).appendBytesRef( - new BytesRef(value.toString()) - ); - case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToIP(value.toString())); - case "date" -> { + case UNSIGNED_LONG -> ((LongBlock.Builder) builder).appendLong(longToUnsignedLong(((Number) value).longValue(), true)); + case LONG, COUNTER_LONG -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue()); + case INTEGER, COUNTER_INTEGER -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue()); + case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue()); + case KEYWORD, TEXT, UNSUPPORTED -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(value.toString())); + case IP -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToIP(value.toString())); + case DATETIME -> { long longVal = dateTimeToLong(value.toString()); ((LongBlock.Builder) builder).appendLong(longVal); } - case "boolean" -> ((BooleanBlock.Builder) builder).appendBoolean(((Boolean) value)); - case "null" -> builder.appendNull(); - case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToVersion(new BytesRef(value.toString()))); - case "_source" -> { + case BOOLEAN -> ((BooleanBlock.Builder) builder).appendBoolean(((Boolean) value)); + case NULL -> builder.appendNull(); + case VERSION -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToVersion(new BytesRef(value.toString()))); + case SOURCE -> { @SuppressWarnings("unchecked") Map o = (Map) value; try { @@ -200,12 +196,11 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li throw new UncheckedIOException(e); } } - case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> { + case GEO_POINT, GEO_SHAPE, CARTESIAN_POINT, CARTESIAN_SHAPE -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here BytesRef wkb = stringToSpatial(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } - default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c)); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java index ad76fde7eca26..d7d0d9033d3b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java @@ -27,7 +27,7 @@ final class ResponseXContentUtils { /** * Returns the column headings for the given columns. */ - static Iterator allColumns(List columns, String name) { + static Iterator allColumns(List columns, String name) { return ChunkedToXContentHelper.singleChunk((builder, params) -> { builder.startArray(name); for (ColumnInfo col : columns) { @@ -41,7 +41,7 @@ static Iterator allColumns(List columns, Strin * Returns the column headings for the given columns, moving the heading * for always-null columns to a {@code null_columns} section. */ - static Iterator nonNullColumns(List columns, boolean[] nullColumns, String name) { + static Iterator nonNullColumns(List columns, boolean[] nullColumns, String name) { return ChunkedToXContentHelper.singleChunk((builder, params) -> { builder.startArray(name); for (int c = 0; c < columns.size(); c++) { @@ -55,7 +55,7 @@ static Iterator nonNullColumns(List columns, b /** Returns the column values for the given pages (described by the column infos). */ static Iterator columnValues( - List columns, + List columns, List pages, boolean columnar, boolean[] nullColumns @@ -70,7 +70,7 @@ static Iterator columnValues( } /** Returns a columnar based representation of the values in the given pages (described by the column infos). */ - static Iterator columnarValues(List columns, List pages, boolean[] nullColumns) { + static Iterator columnarValues(List columns, List pages, boolean[] nullColumns) { final BytesRef scratch = new BytesRef(); return Iterators.flatMap(Iterators.forRange(0, columns.size(), column -> { if (nullColumns != null && nullColumns[column]) { @@ -96,7 +96,7 @@ static Iterator columnarValues(List columns, L } /** Returns a row based representation of the values in the given pages (described by the column infos). 
*/ - static Iterator rowValues(List columns, List pages, boolean[] nullColumns) { + static Iterator rowValues(List columns, List pages, boolean[] nullColumns) { final BytesRef scratch = new BytesRef(); return Iterators.flatMap(pages.iterator(), page -> { final int columnCount = columns.size(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index c83840b384dbd..9328992120c08 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -28,7 +28,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.async.AsyncExecutionId; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; +import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -169,9 +169,9 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener { - List columns = physicalPlan.output() + List columns = physicalPlan.output() .stream() - .map(c -> new ColumnInfo(c.qualifiedName(), c.dataType().outputType())) + .map(c -> new ColumnInfoImpl(c.qualifiedName(), c.dataType().outputType())) .toList(); EsqlQueryResponse.Profile profile = configuration.profile() ? new EsqlQueryResponse.Profile(result.profiles()) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ebeb62ee02df6..ab9133a8aa523 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; @@ -413,10 +414,10 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { } List columnNames = Expressions.names(coordinatorPlan.output()); - List dataTypes = new ArrayList<>(columnNames.size()); + List dataTypes = new ArrayList<>(columnNames.size()); List columnTypes = coordinatorPlan.output() .stream() - .peek(o -> dataTypes.add(o.dataType().outputType())) + .peek(o -> dataTypes.add(o.dataType())) .map(o -> Type.asType(o.dataType().nameUpper())) .toList(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 4d41218b2165f..cff4d274dc49c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -44,7 +44,6 @@ import 
org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -109,7 +108,7 @@ EsqlQueryResponse randomResponse(boolean columnar, EsqlQueryResponse.Profile pro EsqlQueryResponse randomResponseAsync(boolean columnar, EsqlQueryResponse.Profile profile, boolean async) { int noCols = randomIntBetween(1, 10); - List columns = randomList(noCols, noCols, this::randomColumnInfo); + List columns = randomList(noCols, noCols, this::randomColumnInfo); int noPages = randomIntBetween(1, 20); List values = randomList(noPages, noPages, () -> randomPage(columns)); String id = null; @@ -121,12 +120,12 @@ EsqlQueryResponse randomResponseAsync(boolean columnar, EsqlQueryResponse.Profil return new EsqlQueryResponse(columns, values, profile, columnar, id, isRunning, async); } - private ColumnInfo randomColumnInfo() { + private ColumnInfoImpl randomColumnInfo() { DataType type = randomValueOtherThanMany( t -> false == DataType.isPrimitive(t) || t == DataType.DATE_PERIOD || t == DataType.TIME_DURATION || t == DataType.PARTIAL_AGG, () -> randomFrom(DataType.types()) ).widenSmallNumeric(); - return new ColumnInfo(randomAlphaOfLength(10), type.esType()); + return new ColumnInfoImpl(randomAlphaOfLength(10), type.esType()); } private EsqlQueryResponse.Profile randomProfile() { @@ -136,34 +135,34 @@ private EsqlQueryResponse.Profile randomProfile() { return new EsqlQueryResponseProfileTests().createTestInstance(); } - private Page randomPage(List columns) { + private Page randomPage(List columns) { return new Page(columns.stream().map(c -> { - Block.Builder builder = PlannerUtils.toElementType(DataType.fromEs(c.type())).newBlockBuilder(1, blockFactory); + Block.Builder builder = PlannerUtils.toElementType(c.type()).newBlockBuilder(1, blockFactory); switch (c.type()) { - case "unsigned_long", "long", "counter_long" -> ((LongBlock.Builder) builder).appendLong(randomLong()); - case "integer", "counter_integer" -> ((IntBlock.Builder) builder).appendInt(randomInt()); - case "double", "counter_double" -> ((DoubleBlock.Builder) builder).appendDouble(randomDouble()); - case "keyword" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10))); - case "text" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10000))); - case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + case UNSIGNED_LONG, LONG, COUNTER_LONG -> ((LongBlock.Builder) builder).appendLong(randomLong()); + case INTEGER, COUNTER_INTEGER -> ((IntBlock.Builder) builder).appendInt(randomInt()); + case DOUBLE, COUNTER_DOUBLE -> ((DoubleBlock.Builder) builder).appendDouble(randomDouble()); + case KEYWORD -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10))); + case TEXT -> ((BytesRefBlock.Builder) builder).appendBytesRef(new BytesRef(randomAlphaOfLength(10000))); + case IP -> ((BytesRefBlock.Builder) builder).appendBytesRef( new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))) ); - case "date" -> ((LongBlock.Builder) builder).appendLong(randomInstant().toEpochMilli()); - case "boolean" -> ((BooleanBlock.Builder) builder).appendBoolean(randomBoolean()); - case "unsupported" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + case 
DATETIME -> ((LongBlock.Builder) builder).appendLong(randomInstant().toEpochMilli()); + case BOOLEAN -> ((BooleanBlock.Builder) builder).appendBoolean(randomBoolean()); + case UNSUPPORTED -> ((BytesRefBlock.Builder) builder).appendBytesRef( new BytesRef(UnsupportedValueSource.UNSUPPORTED_OUTPUT) ); - case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef()); - case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.asWkb(GeometryTestUtils.randomPoint())); - case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(CARTESIAN.asWkb(ShapeTestUtils.randomPoint())); - case "geo_shape" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + case VERSION -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef()); + case GEO_POINT -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.asWkb(GeometryTestUtils.randomPoint())); + case CARTESIAN_POINT -> ((BytesRefBlock.Builder) builder).appendBytesRef(CARTESIAN.asWkb(ShapeTestUtils.randomPoint())); + case GEO_SHAPE -> ((BytesRefBlock.Builder) builder).appendBytesRef( GEO.asWkb(GeometryTestUtils.randomGeometry(randomBoolean())) ); - case "cartesian_shape" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + case CARTESIAN_SHAPE -> ((BytesRefBlock.Builder) builder).appendBytesRef( CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean())) ); - case "null" -> builder.appendNull(); - case "_source" -> { + case NULL -> builder.appendNull(); + case SOURCE -> { try { ((BytesRefBlock.Builder) builder).appendBytesRef( BytesReference.bytes( @@ -177,7 +176,7 @@ private Page randomPage(List columns) { throw new UncheckedIOException(e); } } - default -> throw new UnsupportedOperationException("unsupported data type [" + c + "]"); + // default -> throw new UnsupportedOperationException("unsupported data type [" + c + "]"); } return builder.build(); }).toArray(Block[]::new)); @@ -186,17 +185,17 @@ private Page randomPage(List columns) { @Override protected EsqlQueryResponse mutateInstance(EsqlQueryResponse instance) { boolean allNull = true; - for (ColumnInfo info : instance.columns()) { - if (false == info.type().equals("null")) { + for (ColumnInfoImpl info : instance.columns()) { + if (info.type() != DataType.NULL) { allNull = false; } } return switch (allNull ? 
between(0, 2) : between(0, 3)) { case 0 -> { int mutCol = between(0, instance.columns().size() - 1); - List cols = new ArrayList<>(instance.columns()); + List cols = new ArrayList<>(instance.columns()); // keep the type the same so the values are still valid but change the name - cols.set(mutCol, new ColumnInfo(cols.get(mutCol).name() + "mut", cols.get(mutCol).type())); + cols.set(mutCol, new ColumnInfoImpl(cols.get(mutCol).name() + "mut", cols.get(mutCol).type())); yield new EsqlQueryResponse(cols, deepCopyOfPages(instance), instance.profile(), instance.columnar(), instance.isAsync()); } case 1 -> new EsqlQueryResponse( @@ -273,7 +272,7 @@ public static class ResponseBuilder { IS_RUNNING, ObjectParser.ValueType.BOOLEAN_OR_NULL ); - parser.declareObjectArray(constructorArg(), (p, c) -> ColumnInfo.fromXContent(p), new ParseField("columns")); + parser.declareObjectArray(constructorArg(), (p, c) -> ColumnInfoImpl.fromXContent(p), new ParseField("columns")); parser.declareField(constructorArg(), (p, c) -> p.list(), new ParseField("values"), ObjectParser.ValueType.OBJECT_ARRAY); PARSER = parser.build(); } @@ -282,7 +281,12 @@ public static class ResponseBuilder { private final EsqlQueryResponse response; @ParserConstructor - public ResponseBuilder(@Nullable String asyncExecutionId, Boolean isRunning, List columns, List> values) { + public ResponseBuilder( + @Nullable String asyncExecutionId, + Boolean isRunning, + List columns, + List> values + ) { this.response = new EsqlQueryResponse( columns, List.of(valuesToPage(TestBlockFactory.getNonBreakingInstance(), columns, values)), @@ -372,7 +376,7 @@ public void testSimpleXContentRowsAsync() { public void testBasicXContentIdAndRunning() { try ( EsqlQueryResponse response = new EsqlQueryResponse( - List.of(new ColumnInfo("foo", "integer")), + List.of(new ColumnInfoImpl("foo", "integer")), List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock())), null, false, @@ -389,7 +393,7 @@ public void testBasicXContentIdAndRunning() { public void testNullColumnsXContentDropNulls() { try ( EsqlQueryResponse response = new EsqlQueryResponse( - List.of(new ColumnInfo("foo", "integer"), new ColumnInfo("all_null", "integer")), + List.of(new ColumnInfoImpl("foo", "integer"), new ColumnInfoImpl("all_null", "integer")), List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock(), blockFactory.newConstantNullBlock(2))), null, false, @@ -418,7 +422,7 @@ public void testNullColumnsFromBuilderXContentDropNulls() { b.appendNull(); try ( EsqlQueryResponse response = new EsqlQueryResponse( - List.of(new ColumnInfo("foo", "integer"), new ColumnInfo("all_null", "integer")), + List.of(new ColumnInfoImpl("foo", "integer"), new ColumnInfoImpl("all_null", "integer")), List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock(), b.build())), null, false, @@ -444,7 +448,7 @@ private EsqlQueryResponse simple(boolean columnar) { private EsqlQueryResponse simple(boolean columnar, boolean async) { return new EsqlQueryResponse( - List.of(new ColumnInfo("foo", "integer")), + List.of(new ColumnInfoImpl("foo", "integer")), List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock())), null, columnar, @@ -455,7 +459,7 @@ private EsqlQueryResponse simple(boolean columnar, boolean async) { public void testProfileXContent() { try ( EsqlQueryResponse response = new EsqlQueryResponse( - List.of(new ColumnInfo("foo", "integer")), + List.of(new ColumnInfoImpl("foo", "integer")), List.of(new 
Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock())), new EsqlQueryResponse.Profile( List.of( @@ -520,7 +524,7 @@ public void testColumns() { var intBlk2 = blockFactory.newIntArrayVector(new int[] { 30, 40, 50 }, 3).asBlock(); var longBlk1 = blockFactory.newLongArrayVector(new long[] { 100L, 200L }, 2).asBlock(); var longBlk2 = blockFactory.newLongArrayVector(new long[] { 300L, 400L, 500L }, 3).asBlock(); - var columnInfo = List.of(new ColumnInfo("foo", "integer"), new ColumnInfo("bar", "long")); + var columnInfo = List.of(new ColumnInfoImpl("foo", "integer"), new ColumnInfoImpl("bar", "long")); var pages = List.of(new Page(intBlk1, longBlk1), new Page(intBlk2, longBlk2)); try (var response = new EsqlQueryResponse(columnInfo, pages, null, false, null, false, false)) { assertThat(columnValues(response.column(0)), contains(10, 20, 30, 40, 50)); @@ -532,7 +536,7 @@ public void testColumns() { public void testColumnsIllegalArg() { var intBlk1 = blockFactory.newIntArrayVector(new int[] { 10 }, 1).asBlock(); - var columnInfo = List.of(new ColumnInfo("foo", "integer")); + var columnInfo = List.of(new ColumnInfoImpl("foo", "integer")); var pages = List.of(new Page(intBlk1)); try (var response = new EsqlQueryResponse(columnInfo, pages, null, false, null, false, false)) { expectThrows(IllegalArgumentException.class, () -> response.column(-1)); @@ -551,7 +555,7 @@ public void testColumnsWithNull() { blk2 = bb2.appendInt(30).appendNull().appendNull().appendInt(60).build(); blk3 = bb3.appendNull().appendInt(80).appendInt(90).appendNull().build(); } - var columnInfo = List.of(new ColumnInfo("foo", "integer")); + var columnInfo = List.of(new ColumnInfoImpl("foo", "integer")); var pages = List.of(new Page(blk1), new Page(blk2), new Page(blk3)); try (var response = new EsqlQueryResponse(columnInfo, pages, null, false, null, false, false)) { assertThat(columnValues(response.column(0)), contains(10, null, 30, null, null, 60, null, 80, 90, null)); @@ -571,7 +575,7 @@ public void testColumnsWithMultiValue() { blk2 = bb2.beginPositionEntry().appendInt(40).appendInt(50).endPositionEntry().build(); blk3 = bb3.appendNull().appendInt(70).appendInt(80).appendNull().build(); } - var columnInfo = List.of(new ColumnInfo("foo", "integer")); + var columnInfo = List.of(new ColumnInfoImpl("foo", "integer")); var pages = List.of(new Page(blk1), new Page(blk2), new Page(blk3)); try (var response = new EsqlQueryResponse(columnInfo, pages, null, false, null, false, false)) { assertThat(columnValues(response.column(0)), contains(List.of(10, 20), null, List.of(40, 50), null, 70, 80, null)); @@ -583,7 +587,7 @@ public void testColumnsWithMultiValue() { public void testRowValues() { for (int times = 0; times < 10; times++) { int numColumns = randomIntBetween(1, 10); - List columns = randomList(numColumns, numColumns, this::randomColumnInfo); + List columns = randomList(numColumns, numColumns, this::randomColumnInfo); int noPages = randomIntBetween(1, 20); List pages = randomList(noPages, noPages, () -> randomPage(columns)); try (var resp = new EsqlQueryResponse(columns, pages, null, false, "", false, false)) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index 6da9e8ef8ba48..658f396aa027c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -17,8 +17,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; +import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -246,17 +246,17 @@ public void testPlainTextEmptyCursorWithoutColumns() { } private static EsqlQueryResponse emptyData() { - return new EsqlQueryResponse(singletonList(new ColumnInfo("name", "keyword")), emptyList(), null, false, false); + return new EsqlQueryResponse(singletonList(new ColumnInfoImpl("name", "keyword")), emptyList(), null, false, false); } private static EsqlQueryResponse regularData() { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); // headers - List headers = asList( - new ColumnInfo("string", "keyword"), - new ColumnInfo("number", "integer"), - new ColumnInfo("location", "geo_point"), - new ColumnInfo("location2", "cartesian_point") + List headers = asList( + new ColumnInfoImpl("string", "keyword"), + new ColumnInfoImpl("number", "integer"), + new ColumnInfoImpl("location", "geo_point"), + new ColumnInfoImpl("location2", "cartesian_point") ); BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); @@ -283,7 +283,7 @@ private static EsqlQueryResponse regularData() { private static EsqlQueryResponse escapedData() { // headers - List headers = asList(new ColumnInfo("first", "keyword"), new ColumnInfo("\"special\"", "keyword")); + List headers = asList(new ColumnInfoImpl("first", "keyword"), new ColumnInfoImpl("\"special\"", "keyword")); // values List values = List.of( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index 9a89f3a1275f1..273561c0348c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -15,8 +15,8 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.geometry.Point; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.TestBlockFactory; +import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import java.util.Arrays; @@ -32,17 +32,17 @@ public class TextFormatterTests extends ESTestCase { static BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); - private final List columns = Arrays.asList( - new ColumnInfo("foo", "keyword"), - new ColumnInfo("bar", "long"), - new ColumnInfo("15charwidename!", "double"), - new ColumnInfo("null_field1", "integer"), - new ColumnInfo("superduperwidename!!!", "double"), - new ColumnInfo("baz", "keyword"), - new ColumnInfo("date", "date"), - new ColumnInfo("location", "geo_point"), - new ColumnInfo("location2", "cartesian_point"), - new ColumnInfo("null_field2", "keyword") + private final List columns = Arrays.asList( + new ColumnInfoImpl("foo", "keyword"), + new ColumnInfoImpl("bar", "long"), + new 
ColumnInfoImpl("15charwidename!", "double"), + new ColumnInfoImpl("null_field1", "integer"), + new ColumnInfoImpl("superduperwidename!!!", "double"), + new ColumnInfoImpl("baz", "keyword"), + new ColumnInfoImpl("date", "date"), + new ColumnInfoImpl("location", "geo_point"), + new ColumnInfoImpl("location2", "cartesian_point"), + new ColumnInfoImpl("null_field2", "keyword") ); private static final BytesRefArray geoPoints = new BytesRefArray(2, BigArrays.NON_RECYCLING_INSTANCE); @@ -183,7 +183,7 @@ public void testVeryLongPadding() { getTextBodyContent( new TextFormatter( new EsqlQueryResponse( - List.of(new ColumnInfo("foo", "keyword")), + List.of(new ColumnInfoImpl("foo", "keyword")), List.of( new Page( blockFactory.newBytesRefBlockBuilder(2) From 76d0e2fcba9b9fdfa225e567fd954e850a6db804 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Tue, 2 Jul 2024 09:19:50 -0400 Subject: [PATCH 110/216] parseHeapRatioOrDeprecatedByteSizeValue for indices.breaker.total.limit (#110236) * parseHeapRatioOrDeprecatedByteSizeValue for indices.breaker.total.limit * Fix tests for indices.breaker.total.limit warning * Spotless * Warn if below minimum percent * Update docs/changelog/110236.yaml * Changelog * Pick correct area for changelog entry * Spotless again dammit * assertCriticalWarnings in circuit breaker test * Expect another warning --- docs/changelog/110236.yaml | 21 ++++++++ .../common/unit/MemorySizeValue.java | 54 +++++++++++++++---- .../HierarchyCircuitBreakerService.java | 4 +- .../settings/MemorySizeSettingsTests.java | 5 ++ .../HierarchyCircuitBreakerServiceTests.java | 26 ++++++++- .../org/elasticsearch/test/ESTestCase.java | 4 +- .../sequence/CircuitBreakerTests.java | 5 +- 7 files changed, 104 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/110236.yaml diff --git a/docs/changelog/110236.yaml b/docs/changelog/110236.yaml new file mode 100644 index 0000000000000..e2dbff7fbf768 --- /dev/null +++ b/docs/changelog/110236.yaml @@ -0,0 +1,21 @@ +pr: 110236 +summary: '`ParseHeapRatioOrDeprecatedByteSizeValue` for `indices.breaker.total.limit`' +area: Infra/Settings +type: deprecation +issues: [] +deprecation: + title: 'Deprecate absolute size values for `indices.breaker.total.limit` setting' + area: Cluster and node setting + details: Previously, the value of `indices.breaker.total.limit` could be specified as + an absolute size in bytes. This setting controls the overal amount of + memory the server is allowed to use before taking remedial actions. Setting + this to a specific number of bytes led to strange behaviour when the node + maximum heap size changed because the circut breaker limit would remain + unchanged. This would either leave the value too low, causing part of the + heap to remain unused; or it would leave the value too high, causing the + circuit breaker to be ineffective at preventing OOM errors. The only + reasonable behaviour for this setting is that it scales with the size of + the heap, and so absolute byte limits are now deprecated. + impact: Users must change their configuration to specify a percentage instead of + an absolute number of bytes for `indices.breaker.total.limit`, or else + accept the default, which is already specified as a percentage. 
diff --git a/server/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java b/server/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java index bfe4e18367a74..274a4e67367c7 100644 --- a/server/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java +++ b/server/src/main/java/org/elasticsearch/common/unit/MemorySizeValue.java @@ -9,6 +9,9 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.monitor.jvm.JvmInfo; import java.util.Objects; @@ -25,18 +28,49 @@ public enum MemorySizeValue { public static ByteSizeValue parseBytesSizeValueOrHeapRatio(String sValue, String settingName) { settingName = Objects.requireNonNull(settingName); if (sValue != null && sValue.endsWith("%")) { - final String percentAsString = sValue.substring(0, sValue.length() - 1); - try { - final double percent = Double.parseDouble(percentAsString); - if (percent < 0 || percent > 100) { - throw new ElasticsearchParseException("percentage should be in [0-100], got [{}]", percentAsString); - } - return ByteSizeValue.ofBytes((long) ((percent / 100) * JvmInfo.jvmInfo().getMem().getHeapMax().getBytes())); - } catch (NumberFormatException e) { - throw new ElasticsearchParseException("failed to parse [{}] as a double", e, percentAsString); - } + return parseHeapRatio(sValue, settingName, 0); } else { return parseBytesSizeValue(sValue, settingName); } } + + public static ByteSizeValue parseHeapRatioOrDeprecatedByteSizeValue(String sValue, String settingName, double minHeapPercent) { + settingName = Objects.requireNonNull(settingName); + if (sValue != null && sValue.endsWith("%")) { + return parseHeapRatio(sValue, settingName, minHeapPercent); + } else { + DeprecationLogger.getLogger(BalancedShardsAllocator.class) + .critical( + DeprecationCategory.SETTINGS, + "absolute_size_not_supported", + "[{}] should be specified using a percentage of the heap. 
Absolute size settings will be forbidden in a future release", + settingName + ); + return parseBytesSizeValue(sValue, settingName); + } + } + + private static ByteSizeValue parseHeapRatio(String sValue, String settingName, double minHeapPercent) { + final String percentAsString = sValue.substring(0, sValue.length() - 1); + try { + final double percent = Double.parseDouble(percentAsString); + if (percent < 0 || percent > 100) { + throw new ElasticsearchParseException("percentage should be in [0-100], got [{}]", percentAsString); + } else if (percent < minHeapPercent) { + DeprecationLogger.getLogger(MemorySizeValue.class) + .warn( + DeprecationCategory.SETTINGS, + "memory_size_below_minimum", + "[{}] setting of [{}] is below the recommended minimum of {}% of the heap", + settingName, + sValue, + minHeapPercent + ); + } + return ByteSizeValue.ofBytes((long) ((percent / 100) * JvmInfo.jvmInfo().getMem().getHeapMax().getBytes())); + } catch (NumberFormatException e) { + throw new ElasticsearchParseException("failed to parse [{}] as a double", e, percentAsString); + } + } + } diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index a2e30b9e18098..d91b19fda1185 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.SuppressForbidden; @@ -65,7 +66,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { Property.NodeScope ); - public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = Setting.memorySizeSetting( + public static final Setting TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING = new Setting<>( "indices.breaker.total.limit", settings -> { if (USE_REAL_MEMORY_USAGE_SETTING.get(settings)) { @@ -74,6 +75,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { return "70%"; } }, + (s) -> MemorySizeValue.parseHeapRatioOrDeprecatedByteSizeValue(s, "indices.breaker.total.limit", 50), Property.Dynamic, Property.NodeScope ); diff --git a/server/src/test/java/org/elasticsearch/common/settings/MemorySizeSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/MemorySizeSettingsTests.java index 98da24fc75c96..5321079896b08 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/MemorySizeSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/MemorySizeSettingsTests.java @@ -71,6 +71,11 @@ public void testCircuitBreakerSettings() { "indices.breaker.total.limit", ByteSizeValue.ofBytes((long) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * defaultTotalPercentage)) ); + assertWarnings( + "[indices.breaker.total.limit] setting of [25%] is below the recommended minimum of 50.0% of the heap", + "[indices.breaker.total.limit] should be specified using a percentage of the heap. 
" + + "Absolute size settings will be forbidden in a future release" + ); assertMemorySizeSetting( HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, "indices.breaker.fielddata.limit", diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java index 8d2255df9e7e8..ff2f55c791dd3 100644 --- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -254,6 +254,8 @@ public void testBorrowingSiblingBreakerMemory() { assertThat(exception.getMessage(), containsString("request=157286400/150mb")); assertThat(exception.getDurability(), equalTo(CircuitBreaker.Durability.TRANSIENT)); } + + assertCircuitBreakerLimitWarning(); } public void testParentBreaksOnRealMemoryUsage() { @@ -325,6 +327,8 @@ long currentMemoryUsage() { memoryUsage.set(100); requestBreaker.addEstimateBytesAndMaybeBreak(reservationInBytes, "request"); assertEquals(0, requestBreaker.getTrippedCount()); + + assertCircuitBreakerLimitWarning(); } /** @@ -749,6 +753,7 @@ public void testTrippedCircuitBreakerDurability() { equalTo(expectedDurability) ); } + assertCircuitBreakerLimitWarning(); } public void testAllocationBucketsBreaker() { @@ -785,6 +790,8 @@ public void testAllocationBucketsBreaker() { assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [allocated_buckets] would be")); assertThat(exception.getMessage(), containsString("which is larger than the limit of [100/100b]")); } + + assertCircuitBreakerLimitWarning(); } public void testRegisterCustomCircuitBreakers_WithDuplicates() { @@ -891,7 +898,7 @@ public void testApplySettingForUpdatingUseRealMemory() { service.getParentLimit() ); - // total.limit defaults to 70% of the JVM heap if use_real_memory set to true + // total.limit defaults to 95% of the JVM heap if use_real_memory set to true clusterSettings.applySettings(Settings.builder().put(useRealMemoryUsageSetting, true).build()); assertEquals( MemorySizeValue.parseBytesSizeValueOrHeapRatio("95%", totalCircuitBreakerLimitSetting).getBytes(), @@ -900,6 +907,15 @@ public void testApplySettingForUpdatingUseRealMemory() { } } + public void testSizeBelowMinimumWarning() { + ByteSizeValue sizeValue = MemorySizeValue.parseHeapRatioOrDeprecatedByteSizeValue( + "19%", + HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), + 20 + ); + assertWarnings("[indices.breaker.total.limit] setting of [19%] is below the recommended minimum of 20.0% of the heap"); + } + public void testBuildParentTripMessage() { class TestChildCircuitBreaker extends NoopCircuitBreaker { private final long used; @@ -972,4 +988,12 @@ public double getOverhead() { HierarchyCircuitBreakerService.permitNegativeValues = false; } } + + void assertCircuitBreakerLimitWarning() { + assertWarnings( + "[indices.breaker.total.limit] should be specified using a percentage of the heap. 
" + + "Absolute size settings will be forbidden in a future release" + ); + + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 0d20c613b27a8..ca6be72fd585b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -649,7 +649,7 @@ protected final void assertSettingDeprecationsAndWarnings(final Setting[] set /** * Convenience method to assert warnings for settings deprecations and general deprecation warnings. All warnings passed to this method * are assumed to be at WARNING level. - * @param expectedWarnings expected general deprecation warnings. + * @param expectedWarnings expected general deprecation warning messages. */ protected final void assertWarnings(String... expectedWarnings) { assertWarnings( @@ -663,7 +663,7 @@ protected final void assertWarnings(String... expectedWarnings) { /** * Convenience method to assert warnings for settings deprecations and general deprecation warnings. All warnings passed to this method * are assumed to be at CRITICAL level. - * @param expectedWarnings expected general deprecation warnings. + * @param expectedWarnings expected general deprecation warning messages. */ protected final void assertCriticalWarnings(String... expectedWarnings) { assertWarnings( diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java index 9293ffc40ec53..c001b312d5578 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java @@ -245,7 +245,9 @@ public void testEqlCBCleanedUp_on_ParentCBBreak() { final int searchRequestsExpectedCount = 2; // let the parent circuit breaker fail, setting its limit to zero - Settings settings = Settings.builder().put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), 0).build(); + Settings settings = Settings.builder() + .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "0%") + .build(); try ( CircuitBreakerService service = new HierarchyCircuitBreakerService( @@ -277,6 +279,7 @@ public void testEqlCBCleanedUp_on_ParentCBBreak() { TumblingWindow window = new TumblingWindow(eqlClient, criteria, null, matcher, Collections.emptyList()); window.execute(wrap(p -> fail(), ex -> assertTrue(ex instanceof CircuitBreakingException))); } + assertCriticalWarnings("[indices.breaker.total.limit] setting of [0%] is below the recommended minimum of 50.0% of the heap"); } private List breakerSettings() { From 55476041d9daa4e1ccad2b732c3e4e5b0a785194 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Tue, 2 Jul 2024 15:45:39 +0200 Subject: [PATCH 111/216] Add BulkPutRoles API (#109339) * Add BulkPutRoles API --- docs/reference/rest-api/security.asciidoc | 2 + .../security/bulk-create-roles.asciidoc | 328 ++++++++++++++++ .../api/security.bulk_put_role.json | 43 +++ .../core/security/action/ActionTypes.java | 3 + .../role/BulkPutRoleRequestBuilder.java | 65 ++++ .../BulkPutRoleRequestBuilderFactory.java | 22 ++ .../action/role/BulkPutRolesRequest.java | 69 ++++ .../action/role/BulkPutRolesResponse.java | 
145 +++++++ .../core/security/authz/RoleDescriptor.java | 17 +- .../authz/privilege/PrivilegeTests.java | 1 + .../xpack/security/operator/Constants.java | 1 + .../SecurityOnTrialLicenseRestTestCase.java | 59 +++ .../security/role/BulkPutRoleRestIT.java | 231 +++++++++++ .../role/RoleWithDescriptionRestIT.java | 69 +--- ...RoleWithRemoteIndicesPrivilegesRestIT.java | 36 +- .../RoleWithWorkflowsRestrictionRestIT.java | 22 +- .../xpack/security/Security.java | 31 +- .../role/TransportBulkPutRolesAction.java | 34 ++ .../action/role/TransportPutRoleAction.java | 57 +-- .../authz/store/NativeRolesStore.java | 260 ++++++++++--- .../action/role/RestBulkPutRolesAction.java | 56 +++ .../role/TransportPutRoleActionTests.java | 237 +----------- .../authz/store/NativeRolesStoreTests.java | 363 +++++++++++++++--- .../test/roles/60_bulk_roles.yml | 83 ++++ 24 files changed, 1724 insertions(+), 510 deletions(-) create mode 100644 docs/reference/rest-api/security/bulk-create-roles.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilderFactory.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesResponse.java create mode 100644 x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml diff --git a/docs/reference/rest-api/security.asciidoc b/docs/reference/rest-api/security.asciidoc index 4571d963179a6..80734ca51b989 100644 --- a/docs/reference/rest-api/security.asciidoc +++ b/docs/reference/rest-api/security.asciidoc @@ -45,6 +45,7 @@ Use the following APIs to add, remove, update, and retrieve role mappings: Use the following APIs to add, remove, update, and retrieve roles in the native realm: * <> +* <> * <> * <> * <> @@ -171,6 +172,7 @@ include::security/create-api-keys.asciidoc[] include::security/put-app-privileges.asciidoc[] include::security/create-role-mappings.asciidoc[] include::security/create-roles.asciidoc[] +include::security/bulk-create-roles.asciidoc[] include::security/create-users.asciidoc[] include::security/create-service-token.asciidoc[] include::security/delegate-pki-authentication.asciidoc[] diff --git a/docs/reference/rest-api/security/bulk-create-roles.asciidoc b/docs/reference/rest-api/security/bulk-create-roles.asciidoc new file mode 100644 index 0000000000000..a8072b7ba549a --- /dev/null +++ b/docs/reference/rest-api/security/bulk-create-roles.asciidoc @@ -0,0 +1,328 @@ +[role="xpack"] +[[security-api-bulk-put-role]] +=== Bulk create or update roles API +preview::[] +++++ +Bulk create or update roles API +++++ + +Bulk adds and updates roles in the native realm. 
+
+[[security-api-bulk-put-role-request]]
+==== {api-request-title}
+
+`POST /_security/role/`
+
+
+[[security-api-bulk-put-role-prereqs]]
+==== {api-prereq-title}
+
+* To use this API, you must have at least the `manage_security` cluster
+privilege.
+
+[[security-api-bulk-put-role-desc]]
+==== {api-description-title}
+
+The role management APIs are generally the preferred way to manage roles, rather than using
+<>. The bulk create
+or update roles API cannot update roles that are defined in roles files.
+
+[[security-api-bulk-put-role-path-params]]
+==== {api-path-parms-title}
+
+`refresh`::
+Optional setting of the {ref}/docs-refresh.html[refresh policy] for the write request. Defaults to Immediate.
+
+[[security-api-bulk-put-role-request-body]]
+==== {api-request-body-title}
+
+The following parameters can be specified in the body of a POST request
+and pertain to adding a set of roles:
+
+`roles`::
+(object) The roles to add as a role name to role map.
+
+====
+`<role-name>` (required):: (string) The role name.
+`applications`:: (list) A list of application privilege entries.
+`application` (required)::: (string) The name of the application to which this entry applies.
+`privileges`::: (list) A list of strings, where each element is the name of an application
+privilege or action.
+`resources`::: (list) A list of resources to which the privileges are applied.
+
+`cluster`:: (list) A list of cluster privileges. These privileges define the
+cluster level actions that users with this role are able to execute.
+
+`global`:: (object) An object defining global privileges. A global privilege is
+a form of cluster privilege that is request-aware. Support for global privileges
+is currently limited to the management of application privileges.
+
+`indices`:: (list) A list of indices permissions entries.
+`field_security`::: (object) The document fields that the owners of the role have
+read access to. For more information, see
+<>.
+`names` (required)::: (list) A list of indices (or index name patterns) to which the
+permissions in this entry apply.
+`privileges` (required)::: (list) The index level privileges that the owners of the role
+have on the specified indices.
+`query`::: A search query that defines the documents the owners of the role have
+read access to. A document within the specified indices must match this query in
+order for it to be accessible by the owners of the role.
+
+`metadata`:: (object) Optional meta-data. Within the `metadata` object, keys
+that begin with `_` are reserved for system usage.
+
+`run_as`:: (list) A list of users that the owners of this role can impersonate.
+For more information, see
+<>.
+
+`remote_indices`:: beta:[] (list) A list of remote indices permissions entries.
++
+--
+NOTE: Remote indices are effective for <>.
+They have no effect for remote clusters configured with the <>.
+--
+`clusters` (required)::: (list) A list of cluster aliases to which the permissions
+in this entry apply.
+`field_security`::: (object) The document fields that the owners of the role have
+read access to. For more information, see
+<>.
+`names` (required)::: (list) A list of indices (or index name patterns) on the remote clusters
+(specified with `clusters`) to which the permissions in this entry apply.
+`privileges` (required)::: (list) The index level privileges that the owners of the role
+have on the specified indices.
+`query`::: A search query that defines the documents the owners of the role have
+read access to.
A document within the specified indices must match this query in +order for it to be accessible by the owners of the role. + +For more information, see <>. +==== + +[[security-bulk-api-put-role-example]] +==== {api-examples-title} + +The following example adds the roles called `my_admin_role` and `my_user_role`: + +[source,console] +-------------------------------------------------- +POST /_security/role +{ + "roles": { + "my_admin_role": { + "cluster": [ + "all" + ], + "indices": [ + { + "names": [ + "index1", + "index2" + ], + "privileges": [ + "all" + ], + "field_security": { + "grant": [ + "title", + "body" + ] + }, + "query": "{\"match\": {\"title\": \"foo\"}}" + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ + "admin", + "read" + ], + "resources": [ + "*" + ] + } + ], + "run_as": [ + "other_user" + ], + "metadata": { + "version": 1 + } + }, + "my_user_role": { + "cluster": [ + "all" + ], + "indices": [ + { + "names": [ + "index1" + ], + "privileges": [ + "read" + ], + "field_security": { + "grant": [ + "title", + "body" + ] + }, + "query": "{\"match\": {\"title\": \"foo\"}}" + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ + "admin", + "read" + ], + "resources": [ + "*" + ] + } + ], + "run_as": [ + "other_user" + ], + "metadata": { + "version": 1 + } + } + } +} +-------------------------------------------------- + +A successful call returns a JSON structure that shows whether the role has been +created, updated, or had no changes made. + +[source,console-result] +-------------------------------------------------- +{ + "created": [ <1> + "my_admin_role", <2> + "my_user_role" + ] +} +-------------------------------------------------- + +<1> Result type, one of `created`, `updated`, `noop`, `errors`. +<2> A list of the roles that were created. + +Because errors are handled individually for each role create or update, the API allows partial success. + +The following query would throw an error for `my_admin_role` because the privilege `bad_cluster_privilege` +doesn't exist, but would be successful for the `my_user_role`. + +[source,console] +-------------------------------------------------- +POST /_security/role +{ + "roles": { + "my_admin_role": { + "cluster": [ + "bad_cluster_privilege" + ], + "indices": [ + { + "names": [ + "index1", + "index2" + ], + "privileges": ["all"], + "field_security": { + "grant": [ + "title", + "body" + ] + }, + "query": "{\"match\": {\"title\": \"foo\"}}" + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ + "admin", + "read" + ], + "resources": [ + "*" + ] + } + ], + "run_as": [ + "other_user" + ], + "metadata": { + "version": 1 + } + }, + "my_user_role": { + "cluster": [ + "all" + ], + "indices": [ + { + "names": [ + "index1" + ], + "privileges": [ + "read" + ], + "field_security": { + "grant": [ + "title", + "body" + ] + }, + "query": "{\"match\": {\"title\": \"foo\"}}" + } + ], + "applications": [ + { + "application": "myapp", + "privileges": [ + "admin", + "read" + ], + "resources": [ + "*" + ] + } + ], + "run_as": [ + "other_user" + ], + "metadata": { + "version": 1 + } + } + } +} +-------------------------------------------------- + +The result would then have the `errors` field set to `true` and hold the error for the `my_admin_role` update. 
+ + +[source,console-result] +-------------------------------------------------- +{ + "created": [ + "my_user_role" <1> + ], + "errors": { <2> + "count": 1, <3> + "details": { + "my_admin_role": { <4> + "type": "action_request_validation_exception", + "reason": "Validation Failed: 1: unknown cluster privilege [bad_cluster_privilege]. a privilege must be either one of the predefined cluster privilege names [manage_own_api_key,none,cancel_task,cross_cluster_replication,cross_cluster_search,delegate_pki,grant_api_key,manage_autoscaling,manage_index_templates,manage_logstash_pipelines,manage_oidc,manage_saml,manage_search_application,manage_search_query_rules,manage_search_synonyms,manage_service_account,manage_token,manage_user_profile,monitor_connector,monitor_data_stream_global_retention,monitor_enrich,monitor_inference,monitor_ml,monitor_rollup,monitor_snapshot,monitor_text_structure,monitor_watcher,post_behavioral_analytics_event,read_ccr,read_connector_secrets,read_fleet_secrets,read_ilm,read_pipeline,read_security,read_slm,transport_client,write_connector_secrets,write_fleet_secrets,create_snapshot,manage_behavioral_analytics,manage_ccr,manage_connector,manage_data_stream_global_retention,manage_enrich,manage_ilm,manage_inference,manage_ml,manage_rollup,manage_slm,manage_watcher,monitor_data_frame_transforms,monitor_transform,manage_api_key,manage_ingest_pipelines,manage_pipeline,manage_data_frame_transforms,manage_transform,manage_security,monitor,manage,all] or a pattern over one of the available cluster actions;" + } + } + } +} +-------------------------------------------------- + +<1> The successfully created role. +<2> The errors encountered. +<3> The number of put role requests that resulted in an error. +<4> The error keyed by role name. diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json new file mode 100644 index 0000000000000..f8916a48b31e6 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_put_role.json @@ -0,0 +1,43 @@ +{ + "security.bulk_put_role": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-bulk-put-role.html", + "description": "Bulk adds and updates roles in the native realm." + }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_security/role", + "methods": [ + "POST" + ] + } + ] + }, + "params": { + "refresh": { + "type": "enum", + "options": [ + "true", + "false", + "wait_for" + ], + "description": "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes." 
+      }
+    },
+    "body": {
+      "description": "The roles to add",
+      "required": true
+    }
+  }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java
index 30ad5e7902d19..43e914f873a83 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java
@@ -9,6 +9,7 @@
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
+import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse;
 import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse;
 
 /**
@@ -23,4 +24,6 @@ public final class ActionTypes {
     );
 
     public static final ActionType<QueryUserResponse> QUERY_USER_ACTION = new ActionType<>("cluster:admin/xpack/security/user/query");
+
+    public static final ActionType<BulkPutRolesResponse> BULK_PUT_ROLES = new ActionType<>("cluster:admin/xpack/security/role/bulk_put");
 }
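For orientation, a minimal sketch (not part of this patch) of how the new action type and the request builder introduced in the next file fit together; the `client` variable, the example payload, and the print statement are illustrative assumptions:

    // Sketch only: submit a bulk role upsert through the new action type.
    // Assumes an org.elasticsearch.client.internal.ElasticsearchClient is in scope.
    void bulkPutRolesSketch(ElasticsearchClient client) throws IOException {
        new BulkPutRoleRequestBuilder(client)
            // the payload uses the same {"roles": {<name>: <descriptor>}} shape as the REST examples above
            .content(new BytesArray("""
                {"roles": {"example_role": {"cluster": ["monitor"]}}}
                """), XContentType.JSON)
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
            .execute(ActionListener.wrap(
                response -> response.getItems()
                    .forEach(item -> System.out.println(item.getRoleName() + " failed=" + item.isFailed())),
                e -> { /* request-level (transport) failure, distinct from per-role errors */ }
            ));
    }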
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java
new file mode 100644
index 0000000000000..c601bbdd79396
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+package org.elasticsearch.xpack.core.security.action.role;
+
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.internal.ElasticsearchClient;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.core.security.action.ActionTypes;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+/**
+ * Builder for requests to bulk add roles to the security index
+ */
+public class BulkPutRoleRequestBuilder extends ActionRequestBuilder<BulkPutRolesRequest, BulkPutRolesResponse> {
+
+    private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowDescription(true).build();
+    @SuppressWarnings("unchecked")
+    private static final ConstructingObjectParser<List<RoleDescriptor>, Void> PARSER = new ConstructingObjectParser<>(
+        "bulk_update_roles_request_payload",
+        a -> (List<RoleDescriptor>) a[0]
+    );
+
+    static {
+        PARSER.declareNamedObjects(optionalConstructorArg(), (p, c, n) -> {
+            p.nextToken();
+            return ROLE_DESCRIPTOR_PARSER.parse(n, p, false);
+        }, new ParseField("roles"));
+    }
+
+    public BulkPutRoleRequestBuilder(ElasticsearchClient client) {
+        super(client, ActionTypes.BULK_PUT_ROLES, new BulkPutRolesRequest());
+    }
+
+    public BulkPutRoleRequestBuilder content(BytesReference content, XContentType xContentType) throws IOException {
+        XContentParser parser = XContentHelper.createParserNotCompressed(
+            LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG,
+            content,
+            xContentType
+        );
+        List<RoleDescriptor> roles = PARSER.parse(parser, null);
+        request.setRoles(roles);
+        return this;
+    }
+
+    public BulkPutRoleRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
+        request.setRefreshPolicy(refreshPolicy);
+        return this;
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilderFactory.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilderFactory.java
new file mode 100644
index 0000000000000..a0c93c70363b0
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilderFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.security.action.role;
+
+import org.elasticsearch.client.internal.Client;
+
+public interface BulkPutRoleRequestBuilderFactory {
+    BulkPutRoleRequestBuilder create(Client client);
+
+    class Default implements BulkPutRoleRequestBuilderFactory {
+        @Override
+        public BulkPutRoleRequestBuilder create(Client client) {
+            // This needs to be added when Bulk API is made public in serverless
+            return new BulkPutRoleRequestBuilder(client);
+        }
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesRequest.java
new file mode 100644
index 0000000000000..a812c33eb41b7
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesRequest.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.security.action.role;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.TransportAction;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+public class BulkPutRolesRequest extends ActionRequest {
+
+    private List<RoleDescriptor> roles;
+
+    public BulkPutRolesRequest() {}
+
+    public void setRoles(List<RoleDescriptor> roles) {
+        this.roles = roles;
+    }
+
+    private WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE;
+
+    @Override
+    public ActionRequestValidationException validate() {
+        // Each role is validated individually when the bulk request is handled, so that an
+        // invalid role produces a partial failure instead of rejecting the whole request
+        return null;
+    }
+
+    public List<RoleDescriptor> getRoles() {
+        return roles;
+    }
+
+    public BulkPutRolesRequest setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
+        this.refreshPolicy = refreshPolicy;
+        return this;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass() || super.equals(o)) return false;
+
+        BulkPutRolesRequest that = (BulkPutRolesRequest) o;
+        return Objects.equals(roles, that.roles);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(roles);
+    }
+
+    public WriteRequest.RefreshPolicy getRefreshPolicy() {
+        return refreshPolicy;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        TransportAction.localOnly();
+    }
+}
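For reference, a small sketch (again, not part of this patch) of how the response type added in the next file is assembled from per-role results; the role names and the exception are illustrative assumptions:

    // Sketch only: a mixed success/failure response, as a transport action might build it.
    BulkPutRolesResponse response = new BulkPutRolesResponse.Builder()
        .addItem(BulkPutRolesResponse.Item.success("role_a", DocWriteResponse.Result.CREATED))
        .addItem(BulkPutRolesResponse.Item.failure("role_b", new IllegalArgumentException("unknown cluster privilege")))
        .build();
    // toXContent groups items by result type, producing e.g.
    // {"created": ["role_a"], "errors": {"count": 1, "details": {"role_b": {...}}}}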
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesResponse.java
new file mode 100644
index 0000000000000..15870806f25fd
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesResponse.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+package org.elasticsearch.xpack.core.security.action.role;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.action.support.TransportAction;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class BulkPutRolesResponse extends ActionResponse implements ToXContentObject {
+
+    private final List<Item> items;
+
+    public static class Builder {
+
+        private final List<Item> items = new LinkedList<>();
+
+        public Builder addItem(Item item) {
+            items.add(item);
+            return this;
+        }
+
+        public BulkPutRolesResponse build() {
+            return new BulkPutRolesResponse(items);
+        }
+    }
+
+    public BulkPutRolesResponse(List<Item> items) {
+        this.items = items;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        Map<String, List<Item>> itemsByType = items.stream().collect(Collectors.groupingBy(Item::getResultType));
+
+        for (var resultEntry : itemsByType.entrySet()) {
+            if (resultEntry.getKey().equals("errors") == false) {
+                builder.startArray(resultEntry.getKey());
+                for (var item : resultEntry.getValue()) {
+                    item.toXContent(builder, params);
+                }
+                builder.endArray();
+            } else {
+                builder.startObject("errors");
+                builder.field("count", resultEntry.getValue().size());
+                builder.startObject("details");
+                for (var item : resultEntry.getValue()) {
+                    builder.startObject(item.roleName);
+                    item.toXContent(builder, params);
+                    builder.endObject();
+                }
+                builder.endObject();
+                builder.endObject();
+            }
+        }
+
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        TransportAction.localOnly();
+    }
+
+    public static class Item implements ToXContentObject {
+        private final Exception cause;
+        private final String roleName;
+
+        private final DocWriteResponse.Result resultType;
+
+        private Item(String roleName, DocWriteResponse.Result resultType, Exception cause) {
+            this.roleName = roleName;
+            this.resultType = resultType;
+            this.cause = cause;
+        }
+
+        Item(StreamInput in) throws IOException {
+            roleName = in.readString();
+            resultType = DocWriteResponse.Result.readFrom(in);
+            cause = in.readException();
+        }
+
+        public Exception getCause() {
+            return cause;
+        }
+
+        public String getResultType() {
+            return resultType == null ?
"errors" : resultType.getLowercase(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (resultType == null) { + ElasticsearchException.generateThrowableXContent(builder, params, cause); + } else { + builder.value(roleName); + } + return builder; + } + + public static Item success(String roleName, DocWriteResponse.Result result) { + return new Item(roleName, result, null); + } + + public static Item failure(String roleName, Exception cause) { + return new Item(roleName, null, cause); + } + + public String getRoleName() { + return roleName; + } + + public boolean isFailed() { + return cause != null; + } + + public String getFailureMessage() { + if (cause != null) { + return cause.getMessage(); + } + return null; + } + } + + public List getItems() { + return items; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index baf72a3411cde..08e774006ad32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -545,12 +545,17 @@ public RoleDescriptor parse(String name, BytesReference source, XContentType xCo } public RoleDescriptor parse(String name, XContentParser parser) throws IOException { - // validate name - Validation.Error validationError = Validation.Roles.validateRoleName(name, true); - if (validationError != null) { - ValidationException ve = new ValidationException(); - ve.addValidationError(validationError.toString()); - throw ve; + return parse(name, parser, true); + } + + public RoleDescriptor parse(String name, XContentParser parser, boolean validate) throws IOException { + if (validate) { + Validation.Error validationError = Validation.Roles.validateRoleName(name, true); + if (validationError != null) { + ValidationException ve = new ValidationException(); + ve.addValidationError(validationError.toString()); + throw ve; + } } // advance to the START_OBJECT token if needed diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 1ade22179ab59..54af9d947a9e8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -297,6 +297,7 @@ public void testReadSecurityPrivilege() { PutUserAction.NAME, DeleteUserAction.NAME, PutRoleAction.NAME, + ActionTypes.BULK_PUT_ROLES.name(), DeleteRoleAction.NAME, PutRoleMappingAction.NAME, DeleteRoleMappingAction.NAME, diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 878d01abd02e3..092c8e6ccf391 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -279,6 
+279,7 @@ public class Constants { "cluster:admin/xpack/security/role/delete", "cluster:admin/xpack/security/role/get", "cluster:admin/xpack/security/role/put", + "cluster:admin/xpack/security/role/bulk_put", "cluster:admin/xpack/security/role_mapping/delete", "cluster:admin/xpack/security/role_mapping/get", "cluster:admin/xpack/security/role_mapping/put", diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index bdbd5c659c479..d877ae63d0037 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -7,9 +7,15 @@ package org.elasticsearch.xpack.security; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -24,9 +30,13 @@ import java.io.IOException; import java.util.Collection; +import java.util.HashMap; import java.util.List; +import java.util.Map; import static org.elasticsearch.test.cluster.local.model.User.ROOT_USER_ROLE; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public abstract class SecurityOnTrialLicenseRestTestCase extends ESRestTestCase { private TestSecurityClient securityClient; @@ -128,4 +138,53 @@ protected ApiKey getApiKey(String id) throws IOException { final TestSecurityClient client = getSecurityClient(); return client.getApiKey(id); } + + protected void upsertRole(String roleDescriptor, String roleName) throws IOException { + Request createRoleRequest = roleRequest(roleDescriptor, roleName); + Response createRoleResponse = adminClient().performRequest(createRoleRequest); + assertOK(createRoleResponse); + } + + protected Request roleRequest(String roleDescriptor, String roleName) { + Request createRoleRequest; + if (randomBoolean()) { + createRoleRequest = new Request(randomFrom(HttpPut.METHOD_NAME, HttpPost.METHOD_NAME), "/_security/role/" + roleName); + createRoleRequest.setJsonEntity(roleDescriptor); + } else { + createRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role"); + createRoleRequest.setJsonEntity(Strings.format(""" + {"roles": {"%s": %s}} + """, roleName, roleDescriptor)); + } + return createRoleRequest; + } + + @SuppressWarnings("unchecked") + protected void assertSendRequestThrowsError(Request request, String expectedError) throws IOException { + String errorMessage; + if (request.getEndpoint().endsWith("/role")) { + Map response = responseAsMap(adminClient().performRequest(request)); + + Map errors = (Map) response.get("errors"); + Map failedItems = (Map) errors.get("details"); + assertEquals(failedItems.size(), 1); + Map error = (Map) failedItems.values().stream().findFirst().orElseThrow(); + 
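+            // the bulk endpoint responds with HTTP 200 and reports per-role failures under errors.details,
+            // so the reason string is read from the response body rather than from a ResponseException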
errorMessage = (String) error.get("reason"); + } else { + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(request)); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + errorMessage = e.getMessage(); + } + assertThat(errorMessage, containsString(expectedError)); + } + + protected void fetchRoleAndAssertEqualsExpected(final String roleName, final RoleDescriptor expectedRoleDescriptor) throws IOException { + final Response getRoleResponse = adminClient().performRequest(new Request("GET", "/_security/role/" + roleName)); + assertOK(getRoleResponse); + final Map actual = responseAsParser(getRoleResponse).map( + HashMap::new, + p -> RoleDescriptor.parserBuilder().allowDescription(true).build().parse(expectedRoleDescriptor.getName(), p) + ); + assertThat(actual, equalTo(Map.of(expectedRoleDescriptor.getName(), expectedRoleDescriptor))); + } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java new file mode 100644 index 0000000000000..6e111c8f54552 --- /dev/null +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java @@ -0,0 +1,231 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.role; + +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.not; + +public class BulkPutRoleRestIT extends SecurityOnTrialLicenseRestTestCase { + public void testPutManyValidRoles() throws Exception { + Map responseMap = upsertRoles(""" + {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test2": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test3": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["write"]}]}}}"""); + assertThat(responseMap, not(hasKey("errors"))); + fetchRoleAndAssertEqualsExpected( + "test1", + new RoleDescriptor( + "test1", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); + fetchRoleAndAssertEqualsExpected( + "test2", + new RoleDescriptor( + "test2", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("read").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); + fetchRoleAndAssertEqualsExpected( + "test3", + new RoleDescriptor( + 
"test3", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("write").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); + } + + @SuppressWarnings("unchecked") + public void testPutMixedValidInvalidRoles() throws Exception { + Map responseMap = upsertRoles(""" + {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test2": + {"cluster": ["bad_privilege"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test3": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["write"]}]}}}"""); + + assertThat(responseMap, hasKey("errors")); + + List created = (List) responseMap.get("created"); + assertThat(created, hasSize(2)); + assertThat(created, contains("test1", "test3")); + + Map errors = (Map) responseMap.get("errors"); + Map failedItems = (Map) errors.get("details"); + assertEquals(failedItems.size(), 1); + + for (var entry : failedItems.entrySet()) { + Map error = (Map) entry.getValue(); + assertThat((String) error.get("reason"), containsString("unknown cluster privilege [bad_privilege]")); + } + + fetchRoleAndAssertEqualsExpected( + "test1", + new RoleDescriptor( + "test1", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); + + fetchRoleAndAssertEqualsExpected( + "test3", + new RoleDescriptor( + "test3", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("write").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); + + final ResponseException e = expectThrows( + ResponseException.class, + () -> adminClient().performRequest(new Request("GET", "/_security/role/test2")) + ); + assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + } + + @SuppressWarnings("unchecked") + public void testPutNoValidRoles() throws Exception { + Map responseMap = upsertRoles(""" + {"roles": {"test1": {"cluster": ["bad_privilege"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test2": + {"cluster": ["bad_privilege"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test3": + {"cluster": ["bad_privilege"],"indices": [{"names": ["*"],"privileges": ["write"]}]}}}"""); + + assertThat(responseMap, hasKey("errors")); + Map errors = (Map) responseMap.get("errors"); + Map failedItems = (Map) errors.get("details"); + assertEquals(failedItems.size(), 3); + + for (var entry : failedItems.entrySet()) { + Map error = (Map) entry.getValue(); + assertThat((String) error.get("reason"), containsString("unknown cluster privilege [bad_privilege]")); + } + + for (String name : List.of("test1", "test2", "test3")) { + final ResponseException e = expectThrows( + ResponseException.class, + () -> adminClient().performRequest(new Request("GET", "/_security/role/" + name)) + ); + assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + } + } + + @SuppressWarnings("unchecked") + public void testBulkUpdates() throws Exception { + String request = """ + {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test2": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test3": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["write"]}]}}}"""; + 
+ { + Map responseMap = upsertRoles(request); + assertThat(responseMap, not(hasKey("errors"))); + + List> items = (List>) responseMap.get("created"); + assertEquals(3, items.size()); + } + { + Map responseMap = upsertRoles(request); + assertThat(responseMap, not(hasKey("errors"))); + + List> items = (List>) responseMap.get("noop"); + assertEquals(3, items.size()); + } + { + request = """ + {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test2": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test3": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}}}"""; + + Map responseMap = upsertRoles(request); + assertThat(responseMap, not(hasKey("errors"))); + List> items = (List>) responseMap.get("updated"); + assertEquals(3, items.size()); + } + } + + protected Map upsertRoles(String roleDescriptorsByName) throws IOException { + Request request = rolesRequest(roleDescriptorsByName); + Response response = adminClient().performRequest(request); + assertOK(response); + return responseAsMap(response); + } + + protected Request rolesRequest(String roleDescriptorsByName) { + Request rolesRequest; + rolesRequest = new Request(HttpPost.METHOD_NAME, "/_security/role"); + rolesRequest.setJsonEntity(org.elasticsearch.core.Strings.format(roleDescriptorsByName)); + return rolesRequest; + } + +} diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java index 95a650737d452..33c78f2dd6324 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithDescriptionRestIT.java @@ -7,22 +7,13 @@ package org.elasticsearch.xpack.security.role; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.core.Strings; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.support.Validation; import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; public class RoleWithDescriptionRestIT extends SecurityOnTrialLicenseRestTestCase { @@ -30,15 +21,13 @@ public void testCreateOrUpdateRoleWithDescription() throws Exception { final String roleName = "role_with_description"; final String initialRoleDescription = randomAlphaOfLengthBetween(0, 10); { - Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/" + roleName); - createRoleRequest.setJsonEntity(Strings.format(""" + upsertRole(Strings.format(""" { "description": "%s", "cluster": ["all"], "indices": [{"names": ["*"], "privileges": ["all"]}] - }""", initialRoleDescription)); - Response createResponse = adminClient().performRequest(createRoleRequest); - assertOK(createResponse); + }""", initialRoleDescription), roleName); + fetchRoleAndAssertEqualsExpected( roleName, new RoleDescriptor( @@ -60,15 
+49,12 @@ public void testCreateOrUpdateRoleWithDescription() throws Exception { } { final String newRoleDescription = randomValueOtherThan(initialRoleDescription, () -> randomAlphaOfLengthBetween(0, 10)); - Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/" + roleName); - updateRoleRequest.setJsonEntity(Strings.format(""" + upsertRole(Strings.format(""" { "description": "%s", "cluster": ["all"], "indices": [{"names": ["index-*"], "privileges": ["all"]}] - }""", newRoleDescription)); - Response updateResponse = adminClient().performRequest(updateRoleRequest); - assertOK(updateResponse); + }""", newRoleDescription), roleName); fetchRoleAndAssertEqualsExpected( roleName, @@ -91,56 +77,37 @@ public void testCreateOrUpdateRoleWithDescription() throws Exception { } } - public void testCreateRoleWithInvalidDescriptionFails() { - Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/role_with_large_description"); - createRoleRequest.setJsonEntity(Strings.format(""" + public void testCreateRoleWithInvalidDescriptionFails() throws IOException { + Request request = roleRequest(Strings.format(""" { "description": "%s", "cluster": ["all"], "indices": [{"names": ["*"], "privileges": ["all"]}] - }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5))), "role_with_large_description"); - ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(createRoleRequest)); - assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); - assertThat( - e.getMessage(), - containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") + assertSendRequestThrowsError( + request, + "Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters." 
); } public void testUpdateRoleWithInvalidDescriptionFails() throws IOException { - Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/my_role"); - createRoleRequest.setJsonEntity(""" + upsertRole(""" { "cluster": ["all"], "indices": [{"names": ["*"], "privileges": ["all"]}] - }"""); - Response createRoleResponse = adminClient().performRequest(createRoleRequest); - assertOK(createRoleResponse); + }""", "my_role"); - Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/my_role"); - updateRoleRequest.setJsonEntity(Strings.format(""" + Request updateRoleRequest = roleRequest(Strings.format(""" { "description": "%s", "cluster": ["all"], "indices": [{"names": ["index-*"], "privileges": ["all"]}] - }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5)))); - - ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(updateRoleRequest)); - assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); - assertThat( - e.getMessage(), - containsString("Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters.") - ); - } + }""", randomAlphaOfLength(Validation.Roles.MAX_DESCRIPTION_LENGTH + randomIntBetween(1, 5))), "my_role"); - private void fetchRoleAndAssertEqualsExpected(final String roleName, final RoleDescriptor expectedRoleDescriptor) throws IOException { - final Response getRoleResponse = adminClient().performRequest(new Request("GET", "/_security/role/" + roleName)); - assertOK(getRoleResponse); - final Map actual = responseAsParser(getRoleResponse).map( - HashMap::new, - p -> RoleDescriptor.parserBuilder().allowDescription(true).build().parse(expectedRoleDescriptor.getName(), p) + assertSendRequestThrowsError( + updateRoleRequest, + "Role description must be less than " + Validation.Roles.MAX_DESCRIPTION_LENGTH + " characters." 
); - assertThat(actual, equalTo(Map.of(expectedRoleDescriptor.getName(), expectedRoleDescriptor))); } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java index aa5967ea7277a..93dc6c3761482 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithRemoteIndicesPrivilegesRestIT.java @@ -50,8 +50,7 @@ public void cleanup() throws IOException { } public void testRemoteIndexPrivileges() throws IOException { - var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); - putRoleRequest.setJsonEntity(""" + upsertRole(""" { "remote_indices": [ { @@ -64,9 +63,7 @@ public void testRemoteIndexPrivileges() throws IOException { } } ] - }"""); - final Response putRoleResponse1 = adminClient().performRequest(putRoleRequest); - assertOK(putRoleResponse1); + }""", REMOTE_SEARCH_ROLE); final Response getRoleResponse = adminClient().performRequest(new Request("GET", "/_security/role/" + REMOTE_SEARCH_ROLE)); assertOK(getRoleResponse); @@ -106,8 +103,7 @@ public void testRemoteIndexPrivileges() throws IOException { assertThat(e.getMessage(), containsString("action [" + TransportSearchAction.TYPE.name() + "] is unauthorized for user")); // Add local privileges and check local authorization works - putRoleRequest = new Request("PUT", "_security/role/" + REMOTE_SEARCH_ROLE); - putRoleRequest.setJsonEntity(""" + upsertRole(""" { "cluster": ["all"], "indices": [ @@ -127,9 +123,8 @@ public void testRemoteIndexPrivileges() throws IOException { } } ] - }"""); - final Response putRoleResponse2 = adminClient().performRequest(putRoleRequest); - assertOK(putRoleResponse2); + }""", REMOTE_SEARCH_ROLE); + final Response searchResponse = client().performRequest(searchRequest); assertOK(searchResponse); @@ -171,8 +166,7 @@ public void testRemoteIndexPrivileges() throws IOException { } public void testGetUserPrivileges() throws IOException { - final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); - putRoleRequest.setJsonEntity(""" + upsertRole(""" { "remote_indices": [ { @@ -191,9 +185,7 @@ public void testGetUserPrivileges() throws IOException { "clusters": ["remote-a", "*"] } ] - }"""); - final Response putRoleResponse1 = adminClient().performRequest(putRoleRequest); - assertOK(putRoleResponse1); + }""", REMOTE_SEARCH_ROLE); final Response getUserPrivilegesResponse1 = executeAsRemoteSearchUser(new Request("GET", "/_security/user/_privileges")); assertOK(getUserPrivilegesResponse1); @@ -222,8 +214,7 @@ public void testGetUserPrivileges() throws IOException { ] }"""))); - final var putRoleRequest2 = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); - putRoleRequest2.setJsonEntity(""" + upsertRole(""" { "cluster": ["all"], "indices": [ @@ -245,9 +236,7 @@ public void testGetUserPrivileges() throws IOException { "clusters": ["remote-c"] } ] - }"""); - final Response putRoleResponse2 = adminClient().performRequest(putRoleRequest2); - assertOK(putRoleResponse2); + }""", REMOTE_SEARCH_ROLE); final Response getUserPrivilegesResponse2 = executeAsRemoteSearchUser(new Request("GET", 
"/_security/user/_privileges")); assertOK(getUserPrivilegesResponse2); @@ -282,8 +271,7 @@ public void testGetUserPrivileges() throws IOException { } public void testGetUserPrivilegesWithMultipleFlsDlsDefinitionsPreservesGroupPerIndexPrivilege() throws IOException { - final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); - putRoleRequest.setJsonEntity(""" + upsertRole(""" { "remote_indices": [ { @@ -305,9 +293,7 @@ public void testGetUserPrivilegesWithMultipleFlsDlsDefinitionsPreservesGroupPerI } } ] - }"""); - final Response putRoleResponse1 = adminClient().performRequest(putRoleRequest); - assertOK(putRoleResponse1); + }""", REMOTE_SEARCH_ROLE); final Response getUserPrivilegesResponse1 = executeAsRemoteSearchUser(new Request("GET", "/_security/user/_privileges")); assertOK(getUserPrivilegesResponse1); diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithWorkflowsRestrictionRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithWorkflowsRestrictionRestIT.java index d2fc27fb3fcae..979fe87ec4bb5 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithWorkflowsRestrictionRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/RoleWithWorkflowsRestrictionRestIT.java @@ -7,10 +7,7 @@ package org.elasticsearch.xpack.security.role; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; @@ -21,8 +18,7 @@ public class RoleWithWorkflowsRestrictionRestIT extends SecurityOnTrialLicenseRestTestCase { public void testCreateRoleWithWorkflowsRestrictionFail() { - Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/role_with_restriction"); - createRoleRequest.setJsonEntity(""" + Request request = roleRequest(""" { "cluster": ["all"], "indices": [ @@ -34,16 +30,15 @@ public void testCreateRoleWithWorkflowsRestrictionFail() { "restriction":{ "workflows": ["foo", "bar"] } - }"""); + }""", "role_with_restriction"); - ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(createRoleRequest)); + ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(request)); assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); assertThat(e.getMessage(), containsString("failed to parse role [role_with_restriction]. 
unexpected field [restriction]")); } public void testUpdateRoleWithWorkflowsRestrictionFail() throws IOException { - Request createRoleRequest = new Request(HttpPut.METHOD_NAME, "/_security/role/my_role"); - createRoleRequest.setJsonEntity(""" + upsertRole(""" { "cluster": ["all"], "indices": [ @@ -52,12 +47,9 @@ public void testUpdateRoleWithWorkflowsRestrictionFail() throws IOException { "privileges": ["all"] } ] - }"""); - Response createRoleResponse = adminClient().performRequest(createRoleRequest); - assertOK(createRoleResponse); + }""", "my_role"); - Request updateRoleRequest = new Request(HttpPost.METHOD_NAME, "/_security/role/my_role"); - updateRoleRequest.setJsonEntity(""" + Request updateRoleRequest = roleRequest(""" { "cluster": ["all"], "indices": [ @@ -69,7 +61,7 @@ public void testUpdateRoleWithWorkflowsRestrictionFail() throws IOException { "restriction":{ "workflows": ["foo", "bar"] } - }"""); + }""", "my_role"); ResponseException e = expectThrows(ResponseException.class, () -> adminClient().performRequest(updateRoleRequest)); assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index bbb1feeef8d44..a38710332313f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -160,6 +160,7 @@ import org.elasticsearch.xpack.core.security.action.profile.SuggestProfilesAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; +import org.elasticsearch.xpack.core.security.action.role.BulkPutRoleRequestBuilderFactory; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.GetRolesAction; @@ -253,6 +254,7 @@ import org.elasticsearch.xpack.security.action.profile.TransportSuggestProfilesAction; import org.elasticsearch.xpack.security.action.profile.TransportUpdateProfileDataAction; import org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction; +import org.elasticsearch.xpack.security.action.role.TransportBulkPutRolesAction; import org.elasticsearch.xpack.security.action.role.TransportClearRolesCacheAction; import org.elasticsearch.xpack.security.action.role.TransportDeleteRoleAction; import org.elasticsearch.xpack.security.action.role.TransportGetRolesAction; @@ -370,6 +372,7 @@ import org.elasticsearch.xpack.security.rest.action.profile.RestSuggestProfilesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestUpdateProfileDataAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; +import org.elasticsearch.xpack.security.rest.action.role.RestBulkPutRolesAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestDeleteRoleAction; import org.elasticsearch.xpack.security.rest.action.role.RestGetRolesAction; @@ -601,6 +604,7 @@ public class Security extends Plugin private final SetOnce scriptServiceReference = new SetOnce<>(); private final SetOnce operatorOnlyRegistry = new SetOnce<>(); private final SetOnce 
putRoleRequestBuilderFactory = new SetOnce<>(); + private final SetOnce bulkPutRoleRequestBuilderFactory = new SetOnce<>(); private final SetOnce createApiKeyRequestBuilderFactory = new SetOnce<>(); private final SetOnce updateApiKeyRequestTranslator = new SetOnce<>(); private final SetOnce bulkUpdateApiKeyRequestTranslator = new SetOnce<>(); @@ -911,19 +915,14 @@ Collection createComponents( dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings, threadPool)); final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings); - final NativeRolesStore nativeRolesStore = new NativeRolesStore( - settings, - client, - getLicenseState(), - systemIndices.getMainIndexManager(), - clusterService, - featureService - ); RoleDescriptor.setFieldPermissionsCache(fieldPermissionsCache); // Need to set to default if it wasn't set by an extension if (putRoleRequestBuilderFactory.get() == null) { putRoleRequestBuilderFactory.set(new PutRoleRequestBuilderFactory.Default()); } + if (bulkPutRoleRequestBuilderFactory.get() == null) { + bulkPutRoleRequestBuilderFactory.set(new BulkPutRoleRequestBuilderFactory.Default()); + } if (createApiKeyRequestBuilderFactory.get() == null) { createApiKeyRequestBuilderFactory.set(new CreateApiKeyRequestBuilderFactory.Default()); } @@ -951,7 +950,7 @@ Collection createComponents( this.fileRolesStore.set( new FileRolesStore(settings, environment, resourceWatcherService, getLicenseState(), xContentRegistry, fileRoleValidator.get()) ); - final ReservedRoleNameChecker reservedRoleNameChecker = reservedRoleNameCheckerFactory.get().create(fileRolesStore.get()::exists); + ReservedRoleNameChecker reservedRoleNameChecker = reservedRoleNameCheckerFactory.get().create(fileRolesStore.get()::exists); components.add(new PluginComponentBinding<>(ReservedRoleNameChecker.class, reservedRoleNameChecker)); final Map, ActionListener>>> customRoleProviders = new LinkedHashMap<>(); @@ -964,6 +963,17 @@ Collection createComponents( } } + final NativeRolesStore nativeRolesStore = new NativeRolesStore( + settings, + client, + getLicenseState(), + systemIndices.getMainIndexManager(), + clusterService, + featureService, + reservedRoleNameChecker, + xContentRegistry + ); + final ApiKeyService apiKeyService = new ApiKeyService( settings, Clock.systemUTC(), @@ -1526,6 +1536,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(DeleteUserAction.INSTANCE, TransportDeleteUserAction.class), new ActionHandler<>(GetRolesAction.INSTANCE, TransportGetRolesAction.class), new ActionHandler<>(PutRoleAction.INSTANCE, TransportPutRoleAction.class), + new ActionHandler<>(ActionTypes.BULK_PUT_ROLES, TransportBulkPutRolesAction.class), new ActionHandler<>(DeleteRoleAction.INSTANCE, TransportDeleteRoleAction.class), new ActionHandler<>(TransportChangePasswordAction.TYPE, TransportChangePasswordAction.class), new ActionHandler<>(AuthenticateAction.INSTANCE, TransportAuthenticateAction.class), @@ -1619,6 +1630,7 @@ public List getRestHandlers( new RestPutUserAction(settings, getLicenseState()), new RestDeleteUserAction(settings, getLicenseState()), new RestGetRolesAction(settings, getLicenseState()), + new RestBulkPutRolesAction(settings, getLicenseState(), bulkPutRoleRequestBuilderFactory.get()), new RestPutRoleAction(settings, getLicenseState(), putRoleRequestBuilderFactory.get()), new RestDeleteRoleAction(settings, getLicenseState()), new RestChangePasswordAction(settings, securityContext.get(), getLicenseState()), @@ -2257,6 +2269,7 @@ public void 
loadExtensions(ExtensionLoader loader) { securityExtensions.addAll(loader.loadExtensions(SecurityExtension.class)); loadSingletonExtensionAndSetOnce(loader, operatorOnlyRegistry, OperatorOnlyRegistry.class); loadSingletonExtensionAndSetOnce(loader, putRoleRequestBuilderFactory, PutRoleRequestBuilderFactory.class); + // TODO add bulkPutRoleRequestBuilderFactory loading here when available loadSingletonExtensionAndSetOnce(loader, getBuiltinPrivilegesResponseTranslator, GetBuiltinPrivilegesResponseTranslator.class); loadSingletonExtensionAndSetOnce(loader, updateApiKeyRequestTranslator, UpdateApiKeyRequestTranslator.class); loadSingletonExtensionAndSetOnce(loader, bulkUpdateApiKeyRequestTranslator, BulkUpdateApiKeyRequestTranslator.class); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java new file mode 100644 index 0000000000000..fca354d04c7c5 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.security.action.role; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesRequest; +import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse; +import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; + +public class TransportBulkPutRolesAction extends TransportAction { + + private final NativeRolesStore rolesStore; + + @Inject + public TransportBulkPutRolesAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) { + super(ActionTypes.BULK_PUT_ROLES.name(), actionFilters, transportService.getTaskManager()); + this.rolesStore = rolesStore; + } + + @Override + protected void doExecute(Task task, final BulkPutRolesRequest request, final ActionListener listener) { + rolesStore.putRoles(request.getRefreshPolicy(), request.getRoles(), listener); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java index 87b9bb72884be..c22e460728dca 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java @@ -6,75 +6,36 @@ */ package org.elasticsearch.xpack.security.action.role; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; -import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; -import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; -import static org.elasticsearch.action.ValidateActions.addValidationError; - public class TransportPutRoleAction extends TransportAction { private final NativeRolesStore rolesStore; - private final NamedXContentRegistry xContentRegistry; - private final ReservedRoleNameChecker reservedRoleNameChecker; @Inject - public TransportPutRoleAction( - ActionFilters actionFilters, - NativeRolesStore rolesStore, - TransportService transportService, - NamedXContentRegistry xContentRegistry, - ReservedRoleNameChecker reservedRoleNameChecker - ) { + public TransportPutRoleAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) { super(PutRoleAction.NAME, actionFilters, transportService.getTaskManager()); this.rolesStore = rolesStore; - this.xContentRegistry = xContentRegistry; - this.reservedRoleNameChecker = reservedRoleNameChecker; } @Override protected void doExecute(Task task, final PutRoleRequest request, final ActionListener listener) { - final Exception validationException = validateRequest(request); - if (validationException != null) { - listener.onFailure(validationException); - } else { - rolesStore.putRole(request, request.roleDescriptor(), listener.safeMap(created -> { - if (created) { - logger.info("added role [{}]", request.name()); - } else { - logger.info("updated role [{}]", request.name()); - } - return new PutRoleResponse(created); - })); - } - } - - private Exception validateRequest(final PutRoleRequest request) { - // TODO we can remove this -- `execute()` already calls `request.validate()` before `doExecute()` - ActionRequestValidationException validationException = request.validate(); - if (validationException != null) { - return validationException; - } - if (reservedRoleNameChecker.isReserved(request.name())) { - throw addValidationError("Role [" + request.name() + "] is reserved and may not be used.", null); - } - try { - DLSRoleQueryValidator.validateQueryField(request.roleDescriptor().getIndicesPrivileges(), xContentRegistry); - } catch (ElasticsearchException | IllegalArgumentException e) { - return e; - } - return null; + rolesStore.putRole(request.getRefreshPolicy(), request.roleDescriptor(), listener.safeMap(created -> { + if (created) { + logger.info("added role [{}]", request.name()); + } else { + logger.info("updated role [{}]", request.name()); + } + return new PutRoleResponse(created); + })); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 5bd837c7d817c..b4afc82ff1816 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -11,8 +11,12 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetResponse; @@ -24,6 +28,8 @@ import org.elasticsearch.action.search.MultiSearchResponse.Item; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; @@ -36,26 +42,32 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.ScrollHelper; +import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheResponse; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; -import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; +import org.elasticsearch.xpack.core.security.action.role.RoleDescriptorRequestValidator; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; +import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.core.security.support.NativeRealmValidationUtil; +import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -64,6 +76,7 @@ import java.util.function.Supplier; import static org.elasticsearch.TransportVersions.ROLE_REMOTE_CLUSTER_PRIVS; +import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; @@ -106,6 +119,12 @@ public class 
NativeRolesStore implements BiConsumer, ActionListener< .allowDescription(true) .build(); + private static final Set UPDATE_ROLES_REFRESH_CACHE_RESULTS = Set.of( + DocWriteResponse.Result.CREATED, + DocWriteResponse.Result.UPDATED, + DocWriteResponse.Result.DELETED + ); + private final Settings settings; private final Client client; private final XPackLicenseState licenseState; @@ -117,13 +136,19 @@ public class NativeRolesStore implements BiConsumer, ActionListener< private final FeatureService featureService; + private final ReservedRoleNameChecker reservedRoleNameChecker; + + private final NamedXContentRegistry xContentRegistry; + public NativeRolesStore( Settings settings, Client client, XPackLicenseState licenseState, SecurityIndexManager securityIndex, ClusterService clusterService, - FeatureService featureService + FeatureService featureService, + ReservedRoleNameChecker reservedRoleNameChecker, + NamedXContentRegistry xContentRegistry ) { this.settings = settings; this.client = client; @@ -131,6 +156,8 @@ public NativeRolesStore( this.securityIndex = securityIndex; this.clusterService = clusterService; this.featureService = featureService; + this.reservedRoleNameChecker = reservedRoleNameChecker; + this.xContentRegistry = xContentRegistry; this.enabled = settings.getAsBoolean(NATIVE_ROLES_ENABLED, true); } @@ -258,89 +285,198 @@ public void onFailure(Exception e) { } } - public void putRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { - if (enabled == false) { - listener.onFailure(new IllegalStateException("Native role management is disabled")); - return; + private Exception validateRoleDescriptor(RoleDescriptor role) { + ActionRequestValidationException validationException = null; + validationException = RoleDescriptorRequestValidator.validate(role, validationException); + + if (reservedRoleNameChecker.isReserved(role.getName())) { + throw addValidationError("Role [" + role.getName() + "] is reserved and may not be used.", validationException); } if (role.isUsingDocumentOrFieldLevelSecurity() && DOCUMENT_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState) == false) { - listener.onFailure(LicenseUtils.newComplianceException("field and document level security")); + return LicenseUtils.newComplianceException("field and document level security"); } else if (role.hasRemoteIndicesPrivileges() && clusterService.state().getMinTransportVersion().before(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)) { - listener.onFailure( - new IllegalStateException( - "all nodes must have version [" - + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion() - + "] or higher to support remote indices privileges" - ) + return new IllegalStateException( + "all nodes must have version [" + + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion() + + "] or higher to support remote indices privileges" ); } else if (role.hasRemoteClusterPermissions() && clusterService.state().getMinTransportVersion().before(ROLE_REMOTE_CLUSTER_PRIVS)) { - listener.onFailure( - new IllegalStateException( - "all nodes must have version [" + ROLE_REMOTE_CLUSTER_PRIVS + "] or higher to support remote cluster privileges" - ) + return new IllegalStateException( + "all nodes must have version [" + ROLE_REMOTE_CLUSTER_PRIVS + "] or higher to support remote cluster privileges" ); } else if (role.hasDescription() && clusterService.state().getMinTransportVersion().before(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { - listener.onFailure( - new 
IllegalStateException( - "all nodes must have version [" - + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() - + "] or higher to support specifying role description" - ) + return new IllegalStateException( + "all nodes must have version [" + + TransportVersions.SECURITY_ROLE_DESCRIPTION.toReleaseVersion() + + "] or higher to support specifying role description" ); - } else { - innerPutRole(request, role, listener); } + try { + DLSRoleQueryValidator.validateQueryField(role.getIndicesPrivileges(), xContentRegistry); + } catch (ElasticsearchException | IllegalArgumentException e) { + return e; + } + + return validationException; } - // pkg-private for testing - void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { - final String roleName = role.getName(); - assert NativeRealmValidationUtil.validateRoleName(roleName, false) == null : "Role name was invalid or reserved: " + roleName; - assert false == role.hasRestriction() : "restriction is not supported for native roles"; + public void putRole(final WriteRequest.RefreshPolicy refreshPolicy, final RoleDescriptor role, final ActionListener listener) { + if (enabled == false) { + listener.onFailure(new IllegalStateException("Native role management is disabled")); + return; + } + Exception validationException = validateRoleDescriptor(role); + + if (validationException != null) { + listener.onFailure(validationException); + return; + } - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - final XContentBuilder xContentBuilder; + try { + IndexRequest indexRequest = createRoleIndexRequest(role); + indexRequest.setRefreshPolicy(refreshPolicy); + securityIndex.prepareIndexIfNeededThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( + client.threadPool().getThreadContext(), + SECURITY_ORIGIN, + indexRequest, + new ActionListener() { + @Override + public void onResponse(DocWriteResponse indexResponse) { + final boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; + logger.trace("Created role: [{}]", indexRequest); + clearRoleCache(role.getName(), listener, created); + } + + @Override + public void onFailure(Exception e) { + logger.error(() -> "failed to put role [" + role.getName() + "]", e); + listener.onFailure(e); + } + }, + client::index + ) + ); + } catch (IOException exception) { + listener.onFailure(exception); + } + } + + public void putRoles( + final WriteRequest.RefreshPolicy refreshPolicy, + final List roles, + final ActionListener listener + ) { + if (enabled == false) { + listener.onFailure(new IllegalStateException("Native role management is disabled")); + return; + } + BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(refreshPolicy); + Map validationErrorByRoleName = new HashMap<>(); + + for (RoleDescriptor role : roles) { + Exception validationException; try { - xContentBuilder = role.toXContent( - jsonBuilder(), - ToXContent.EMPTY_PARAMS, - true, - featureService.clusterHasFeature(clusterService.state(), SECURITY_ROLES_METADATA_FLATTENED) - ); - } catch (IOException e) { - listener.onFailure(e); - return; + validationException = validateRoleDescriptor(role); + } catch (Exception e) { + validationException = e; + } + + if (validationException != null) { + validationErrorByRoleName.put(role.getName(), validationException); + } else { + try { + bulkRequest.add(createRoleUpsertRequest(role)); + } catch (IOException ioException) { + listener.onFailure(ioException); + } } - final IndexRequest 
indexRequest = client.prepareIndex(SECURITY_MAIN_ALIAS) - .setId(getIdForRole(roleName)) - .setSource(xContentBuilder) - .setRefreshPolicy(request.getRefreshPolicy()) - .request(); - executeAsyncWithOrigin( + } + + List roleNames = roles.stream().map(RoleDescriptor::getName).toList(); + + if (bulkRequest.numberOfActions() == 0) { + BulkPutRolesResponse.Builder bulkPutRolesResponseBuilder = new BulkPutRolesResponse.Builder(); + roleNames.stream() + .map(roleName -> BulkPutRolesResponse.Item.failure(roleName, validationErrorByRoleName.get(roleName))) + .forEach(bulkPutRolesResponseBuilder::addItem); + + listener.onResponse(bulkPutRolesResponseBuilder.build()); + return; + } + securityIndex.prepareIndexIfNeededThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( client.threadPool().getThreadContext(), SECURITY_ORIGIN, - indexRequest, - new ActionListener() { + bulkRequest, + new ActionListener() { @Override - public void onResponse(DocWriteResponse indexResponse) { - final boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; - logger.trace("Created role: [{}]", indexRequest); - clearRoleCache(roleName, listener, created); + public void onResponse(BulkResponse bulkResponse) { + List rolesToRefreshInCache = new ArrayList<>(roleNames.size()); + + Iterator bulkItemResponses = bulkResponse.iterator(); + BulkPutRolesResponse.Builder bulkPutRolesResponseBuilder = new BulkPutRolesResponse.Builder(); + + roleNames.stream().map(roleName -> { + if (validationErrorByRoleName.containsKey(roleName)) { + return BulkPutRolesResponse.Item.failure(roleName, validationErrorByRoleName.get(roleName)); + } + BulkItemResponse resp = bulkItemResponses.next(); + if (resp.isFailed()) { + return BulkPutRolesResponse.Item.failure(roleName, resp.getFailure().getCause()); + } + if (UPDATE_ROLES_REFRESH_CACHE_RESULTS.contains(resp.getResponse().getResult())) { + rolesToRefreshInCache.add(roleName); + } + return BulkPutRolesResponse.Item.success(roleName, resp.getResponse().getResult()); + }).forEach(bulkPutRolesResponseBuilder::addItem); + + clearRoleCache(rolesToRefreshInCache.toArray(String[]::new), ActionListener.wrap(res -> { + listener.onResponse(bulkPutRolesResponseBuilder.build()); + }, listener::onFailure), bulkResponse); } @Override public void onFailure(Exception e) { - logger.error(() -> "failed to put role [" + roleName + "]", e); + logger.error(() -> "failed to put roles", e); listener.onFailure(e); } }, - client::index - ); - }); + client::bulk + ) + ); + } + + private IndexRequest createRoleIndexRequest(final RoleDescriptor role) throws IOException { + return client.prepareIndex(SECURITY_MAIN_ALIAS) + .setId(getIdForRole(role.getName())) + .setSource(createRoleXContentBuilder(role)) + .request(); + } + + private UpdateRequest createRoleUpsertRequest(final RoleDescriptor role) throws IOException { + return client.prepareUpdate(SECURITY_MAIN_ALIAS, getIdForRole(role.getName())) + .setDoc(createRoleXContentBuilder(role)) + .setDocAsUpsert(true) + .request(); + } + + private XContentBuilder createRoleXContentBuilder(RoleDescriptor role) throws IOException { + assert NativeRealmValidationUtil.validateRoleName(role.getName(), false) == null + : "Role name was invalid or reserved: " + role.getName(); + assert false == role.hasRestriction() : "restriction is not supported for native roles"; + return role.toXContent( + jsonBuilder(), + ToXContent.EMPTY_PARAMS, + true, + featureService.clusterHasFeature(clusterService.state(), SECURITY_ROLES_METADATA_FLATTENED) + ); } public 
void usageStats(ActionListener> listener) { @@ -498,7 +634,11 @@ private void executeGetRoleRequest(String role, ActionListener list } private void clearRoleCache(final String role, ActionListener listener, Response response) { - ClearRolesCacheRequest request = new ClearRolesCacheRequest().names(role); + clearRoleCache(new String[] { role }, listener, response); + } + + private void clearRoleCache(final String[] roles, ActionListener listener, Response response) { + ClearRolesCacheRequest request = new ClearRolesCacheRequest().names(roles); executeAsyncWithOrigin(client, SECURITY_ORIGIN, ClearRolesCacheAction.INSTANCE, request, new ActionListener<>() { @Override public void onResponse(ClearRolesCacheResponse nodes) { @@ -507,9 +647,9 @@ public void onResponse(ClearRolesCacheResponse nodes) { @Override public void onFailure(Exception e) { - logger.error(() -> "unable to clear cache for role [" + role + "]", e); + logger.error(() -> "unable to clear cache for roles [" + Arrays.toString(roles) + "]", e); ElasticsearchException exception = new ElasticsearchException( - "clearing the cache for [" + role + "] failed. please clear the role cache manually", + "clearing the cache for [" + Arrays.toString(roles) + "] failed. please clear the role cache manually", e ); listener.onFailure(exception); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java new file mode 100644 index 0000000000000..f132da09c4ec0 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkPutRolesAction.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+package org.elasticsearch.xpack.security.rest.action.role;
+
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.xpack.core.security.action.role.BulkPutRoleRequestBuilder;
+import org.elasticsearch.xpack.core.security.action.role.BulkPutRoleRequestBuilderFactory;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.elasticsearch.rest.RestRequest.Method.POST;
+
+/**
+ * Rest endpoint to bulk add roles to the security index
+ */
+public class RestBulkPutRolesAction extends NativeRoleBaseRestHandler {
+
+    private final BulkPutRoleRequestBuilderFactory builderFactory;
+
+    public RestBulkPutRolesAction(Settings settings, XPackLicenseState licenseState, BulkPutRoleRequestBuilderFactory builderFactory) {
+        super(settings, licenseState);
+        this.builderFactory = builderFactory;
+    }
+
+    @Override
+    public List<Route> routes() {
+        return List.of(Route.builder(POST, "/_security/role").build());
+    }
+
+    @Override
+    public String getName() {
+        return "security_bulk_put_roles_action";
+    }
+
+    @Override
+    protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException {
+        final BulkPutRoleRequestBuilder requestBuilder = builderFactory.create(client)
+            .content(request.requiredContent(), request.getXContentType());
+
+        if (request.param("refresh") != null) {
+            requestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.parse(request.param("refresh")));
+        }
+
+        return channel -> requestBuilder.execute(new RestToXContentListener<>(channel));
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java
index 8610273f205c9..759bc80ac511f 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java
@@ -6,13 +6,9 @@
  */
 package org.elasticsearch.xpack.security.action.role;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.ElasticsearchSecurityException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -28,21 +24,16 @@
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest;
 import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse;
-import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
-import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker;
 import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
 import org.junit.BeforeClass;
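
// Editor's note: a minimal, self-contained sketch (plain Java, no Elasticsearch types)
// of the response-correlation pattern NativeRolesStore.putRoles uses above. Roles that
// fail validation never make it into the BulkRequest, so when the bulk response comes
// back, its items (one per role actually sent, in order) must be interleaved with the
// per-name validation failures. All names here (BulkResultInterleaver, Item, correlate)
// are illustrative stand-ins, not real classes from the patch.
import java.util.Iterator;
import java.util.List;
import java.util.Map;

public class BulkResultInterleaver {
    record Item(String name, boolean failed, String detail) {}

    // allNames preserves request order; validationErrors holds names that were never
    // sent; bulkResults holds one result per name that *was* sent, in the same order.
    static List<Item> correlate(List<String> allNames, Map<String, String> validationErrors, List<String> bulkResults) {
        Iterator<String> bulk = bulkResults.iterator();
        return allNames.stream().map(name -> {
            if (validationErrors.containsKey(name)) {
                return new Item(name, true, validationErrors.get(name));
            }
            // safe: exactly one bulk result exists for every name that passed validation
            return new Item(name, false, bulk.next());
        }).toList();
    }

    public static void main(String[] args) {
        var items = correlate(
            List.of("a", "b", "c"),
            Map.of("b", "reserved role name"),
            List.of("created", "updated")
        );
        items.forEach(System.out::println); // a -> created, b -> validation failure, c -> updated
    }
}
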
-import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -53,7 +44,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; public class TransportPutRoleActionTests extends ESTestCase { @@ -92,109 +82,6 @@ protected NamedXContentRegistry xContentRegistry() { ); } - public void testReservedRole() { - final String roleName = randomFrom(new ArrayList<>(ReservedRolesStore.names())); - NativeRolesStore rolesStore = mock(NativeRolesStore.class); - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - mock(ThreadPool.class), - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - TransportPutRoleAction action = new TransportPutRoleAction( - mock(ActionFilters.class), - rolesStore, - transportService, - xContentRegistry(), - new ReservedRoleNameChecker.Default() - ); - - PutRoleRequest request = new PutRoleRequest(); - request.name(roleName); - - final AtomicReference throwableRef = new AtomicReference<>(); - final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(mock(Task.class), request, new ActionListener() { - @Override - public void onResponse(PutRoleResponse response) { - responseRef.set(response); - } - - @Override - public void onFailure(Exception e) { - throwableRef.set(e); - } - }); - - assertThat(responseRef.get(), is(nullValue())); - assertThat(throwableRef.get(), is(instanceOf(IllegalArgumentException.class))); - assertThat(throwableRef.get().getMessage(), containsString("is reserved and may not be used")); - verifyNoMoreInteractions(rolesStore); - } - - public void testValidRole() { - testValidRole(randomFrom("admin", "dept_a", "restricted")); - } - - public void testValidRoleWithInternalRoleName() { - testValidRole(AuthenticationTestHelper.randomInternalRoleName()); - } - - private void testValidRole(String roleName) { - NativeRolesStore rolesStore = mock(NativeRolesStore.class); - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - mock(ThreadPool.class), - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - TransportPutRoleAction action = new TransportPutRoleAction( - mock(ActionFilters.class), - rolesStore, - transportService, - xContentRegistry(), - new ReservedRoleNameChecker.Default() - ); - - final boolean created = randomBoolean(); - PutRoleRequest request = new PutRoleRequest(); - request.name(roleName); - - doAnswer(invocation -> { - Object[] args = invocation.getArguments(); - assert args.length == 3; - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) args[2]; - listener.onResponse(created); - return null; - }).when(rolesStore).putRole(eq(request), any(RoleDescriptor.class), anyActionListener()); - - final AtomicReference throwableRef = new AtomicReference<>(); - final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(mock(Task.class), request, new ActionListener() { - @Override - public void 
onResponse(PutRoleResponse response) { - responseRef.set(response); - } - - @Override - public void onFailure(Exception e) { - throwableRef.set(e); - } - }); - - assertThat(responseRef.get(), is(notNullValue())); - assertThat(responseRef.get().isCreated(), is(created)); - assertThat(throwableRef.get(), is(nullValue())); - verify(rolesStore, times(1)).putRole(eq(request), any(RoleDescriptor.class), anyActionListener()); - } - public void testException() { final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException()); final String roleName = randomFrom("admin", "dept_a", "restricted"); @@ -208,17 +95,10 @@ public void testException() { null, Collections.emptySet() ); - TransportPutRoleAction action = new TransportPutRoleAction( - mock(ActionFilters.class), - rolesStore, - transportService, - xContentRegistry(), - new ReservedRoleNameChecker.Default() - ); + TransportPutRoleAction action = new TransportPutRoleAction(mock(ActionFilters.class), rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); - doAnswer(invocation -> { Object[] args = invocation.getArguments(); assert args.length == 3; @@ -226,11 +106,11 @@ public void testException() { ActionListener listener = (ActionListener) args[2]; listener.onFailure(e); return null; - }).when(rolesStore).putRole(eq(request), any(RoleDescriptor.class), anyActionListener()); + }).when(rolesStore).putRole(eq(request.getRefreshPolicy()), any(RoleDescriptor.class), anyActionListener()); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(mock(Task.class), request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener<>() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); @@ -245,115 +125,6 @@ public void onFailure(Exception e) { assertThat(responseRef.get(), is(nullValue())); assertThat(throwableRef.get(), is(notNullValue())); assertThat(throwableRef.get(), is(sameInstance(e))); - verify(rolesStore, times(1)).putRole(eq(request), any(RoleDescriptor.class), anyActionListener()); - } - - public void testCreationOfRoleWithMalformedQueryJsonFails() { - NativeRolesStore rolesStore = mock(NativeRolesStore.class); - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - mock(ThreadPool.class), - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - TransportPutRoleAction action = new TransportPutRoleAction( - mock(ActionFilters.class), - rolesStore, - transportService, - xContentRegistry(), - new ReservedRoleNameChecker.Default() - ); - PutRoleRequest request = new PutRoleRequest(); - request.name("test"); - String[] malformedQueryJson = new String[] { - "{ \"match_all\": { \"unknown_field\": \"\" } }", - "{ malformed JSON }", - "{ \"unknown\": {\"\"} }", - "{}" }; - BytesReference query = new BytesArray(randomFrom(malformedQueryJson)); - request.addIndex(new String[] { "idx1" }, new String[] { "read" }, null, null, query, randomBoolean()); - - final AtomicReference throwableRef = new AtomicReference<>(); - final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(mock(Task.class), request, new ActionListener() { - @Override - public void onResponse(PutRoleResponse response) { - responseRef.set(response); - } - - @Override - public void onFailure(Exception e) { - throwableRef.set(e); - } - }); - - 
assertThat(responseRef.get(), is(nullValue())); - assertThat(throwableRef.get(), is(notNullValue())); - Throwable t = throwableRef.get(); - assertThat(t, instanceOf(ElasticsearchParseException.class)); - assertThat( - t.getMessage(), - containsString( - "failed to parse field 'query' for indices [" - + Strings.arrayToCommaDelimitedString(new String[] { "idx1" }) - + "] at index privilege [0] of role descriptor" - ) - ); - } - - public void testCreationOfRoleWithUnsupportedQueryFails() throws Exception { - NativeRolesStore rolesStore = mock(NativeRolesStore.class); - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - mock(ThreadPool.class), - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - TransportPutRoleAction action = new TransportPutRoleAction( - mock(ActionFilters.class), - rolesStore, - transportService, - xContentRegistry(), - new ReservedRoleNameChecker.Default() - ); - PutRoleRequest request = new PutRoleRequest(); - request.name("test"); - String hasChildQuery = "{ \"has_child\": { \"type\": \"child\", \"query\": { \"match_all\": {} } } }"; - String hasParentQuery = "{ \"has_parent\": { \"parent_type\": \"parent\", \"query\": { \"match_all\": {} } } }"; - BytesReference query = new BytesArray(randomFrom(hasChildQuery, hasParentQuery)); - request.addIndex(new String[] { "idx1" }, new String[] { "read" }, null, null, query, randomBoolean()); - - final AtomicReference throwableRef = new AtomicReference<>(); - final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(mock(Task.class), request, new ActionListener() { - @Override - public void onResponse(PutRoleResponse response) { - responseRef.set(response); - } - - @Override - public void onFailure(Exception e) { - throwableRef.set(e); - } - }); - - assertThat(responseRef.get(), is(nullValue())); - assertThat(throwableRef.get(), is(notNullValue())); - Throwable t = throwableRef.get(); - assertThat(t, instanceOf(ElasticsearchParseException.class)); - assertThat( - t.getMessage(), - containsString( - "failed to parse field 'query' for indices [" - + Strings.arrayToCommaDelimitedString(new String[] { "idx1" }) - + "] at index privilege [0] of role descriptor" - ) - ); + verify(rolesStore, times(1)).putRole(eq(request.getRefreshPolicy()), any(RoleDescriptor.class), anyActionListener()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index e14a25088f749..e22883d80cb8d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -12,12 +12,19 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.internal.Client; import 
org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -28,6 +35,7 @@ import org.elasticsearch.cluster.routing.UnassignedInfo.Reason; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.version.CompatibilityVersions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -45,17 +53,21 @@ import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; +import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse; +import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.RoleRestrictionTests; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; +import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; +import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.support.SecuritySystemIndices; import org.elasticsearch.xpack.security.test.SecurityTestUtils; @@ -68,12 +80,15 @@ import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_FORMAT_SETTING; +import static org.elasticsearch.indices.SystemIndexDescriptor.VERSION_META_KEY; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; import static org.elasticsearch.xpack.core.security.SecurityField.DOCUMENT_LEVEL_SECURITY_FEATURE; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomApplicationPrivileges; @@ -89,7 +104,10 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static 
org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -97,16 +115,63 @@ public class NativeRolesStoreTests extends ESTestCase { private ThreadPool threadPool; + private final Client client = mock(Client.class); + @Before - public void createThreadPool() { + public void beforeNativeRoleStoreTests() { threadPool = new TestThreadPool("index audit trail update mapping tests"); + when(client.threadPool()).thenReturn(threadPool); + when(client.prepareIndex(SECURITY_MAIN_ALIAS)).thenReturn(new IndexRequestBuilder(client)); + when(client.prepareUpdate(any(), any())).thenReturn(new UpdateRequestBuilder(client)); } @After - public void terminateThreadPool() throws Exception { + public void terminateThreadPool() { terminate(threadPool); } + private NativeRolesStore createRoleStoreForTest() { + return createRoleStoreForTest(Settings.builder().build()); + } + + private NativeRolesStore createRoleStoreForTest(Settings settings) { + new ReservedRolesStore(Set.of("superuser")); + final ClusterService clusterService = mock(ClusterService.class); + final SecuritySystemIndices systemIndices = new SecuritySystemIndices(settings); + final FeatureService featureService = mock(FeatureService.class); + systemIndices.init(client, featureService, clusterService); + final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); + // Create the index + securityIndex.clusterChanged(new ClusterChangedEvent("source", getClusterStateWithSecurityIndex(), getEmptyClusterState())); + + return new NativeRolesStore( + settings, + client, + TestUtils.newTestLicenseState(), + securityIndex, + clusterService, + mock(FeatureService.class), + new ReservedRoleNameChecker.Default(), + mock(NamedXContentRegistry.class) + ); + } + + private void putRole(NativeRolesStore rolesStore, RoleDescriptor roleDescriptor, ActionListener actionListener) + throws IOException { + if (randomBoolean()) { + rolesStore.putRole(WriteRequest.RefreshPolicy.IMMEDIATE, roleDescriptor, actionListener); + } else { + rolesStore.putRoles(WriteRequest.RefreshPolicy.IMMEDIATE, List.of(roleDescriptor), ActionListener.wrap(resp -> { + BulkPutRolesResponse.Item item = resp.getItems().get(0); + if (item.getResultType().equals("created")) { + actionListener.onResponse(true); + } else { + throw item.getCause(); + } + }, actionListener::onFailure)); + } + } + // test that we can read a role where field permissions are stored in 2.x format (fields:...) 
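
// Editor's note: a self-contained sketch of the listener-adaptation idiom the putRole
// test helper above relies on: the bulk API reports one item per input role, and a
// single-role caller unwraps item 0, succeeding on "created" and treating any other
// result as a failure. Item and SingleFromBulk are stand-ins, not the Elasticsearch
// ActionListener/BulkPutRolesResponse classes.
import java.util.List;
import java.util.function.Consumer;

final class SingleFromBulk {
    record Item(String resultType, Exception cause) {}

    static void adapt(List<Item> bulkItems, Consumer<Boolean> onResponse, Consumer<Exception> onFailure) {
        Item item = bulkItems.get(0); // a single-role call produces exactly one item
        if ("created".equals(item.resultType())) {
            onResponse.accept(true); // mirrors actionListener.onResponse(true) in the helper
        } else {
            // the helper treats any non-"created" result as a failure and surfaces its cause
            onFailure.accept(item.cause() != null ? item.cause() : new IllegalStateException(item.resultType()));
        }
    }

    public static void main(String[] args) {
        adapt(List.of(new Item("created", null)), r -> System.out.println("created=" + r), Throwable::printStackTrace);
    }
}
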
public void testBWCFieldPermissions() throws IOException { Path path = getDataPath("roles2xformat.json"); @@ -329,35 +394,28 @@ public void testPutOfRoleWithFlsDlsUnlicensed() throws IOException { final ClusterService clusterService = mockClusterServiceWithMinNodeVersion(TransportVersion.current()); final FeatureService featureService = mock(FeatureService.class); final XPackLicenseState licenseState = mock(XPackLicenseState.class); - final AtomicBoolean methodCalled = new AtomicBoolean(false); final SecuritySystemIndices systemIndices = new SecuritySystemIndices(clusterService.getSettings()); systemIndices.init(client, featureService, clusterService); final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - + // Init for validation + new ReservedRolesStore(Set.of("superuser")); final NativeRolesStore rolesStore = new NativeRolesStore( Settings.EMPTY, client, licenseState, securityIndex, clusterService, - mock(FeatureService.class) - ) { - @Override - void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { - if (methodCalled.compareAndSet(false, true)) { - listener.onResponse(true); - } else { - fail("method called more than once!"); - } - } - }; + mock(FeatureService.class), + mock(ReservedRoleNameChecker.class), + mock(NamedXContentRegistry.class) + ); + // setup the roles store so the security index exists securityIndex.clusterChanged( new ClusterChangedEvent("fls_dls_license", getClusterStateWithSecurityIndex(), getEmptyClusterState()) ); - PutRoleRequest putRoleRequest = new PutRoleRequest(); RoleDescriptor flsRole = new RoleDescriptor( "fls", null, @@ -366,8 +424,9 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final null ); PlainActionFuture future = new PlainActionFuture<>(); - rolesStore.putRole(putRoleRequest, flsRole, future); + putRole(rolesStore, flsRole, future); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); + assertThat(e.getMessage(), containsString("field and document level security")); BytesReference matchAllBytes = XContentHelper.toXContent(QueryBuilders.matchAllQuery(), XContentType.JSON, false); @@ -378,7 +437,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final null ); future = new PlainActionFuture<>(); - rolesStore.putRole(putRoleRequest, dlsRole, future); + putRole(rolesStore, dlsRole, future); e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); assertThat(e.getMessage(), containsString("field and document level security")); @@ -396,22 +455,14 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final null ); future = new PlainActionFuture<>(); - rolesStore.putRole(putRoleRequest, flsDlsRole, future); + putRole(rolesStore, flsDlsRole, future); e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); assertThat(e.getMessage(), containsString("field and document level security")); - - RoleDescriptor noFlsDlsRole = new RoleDescriptor( - "no_fls_dls", - null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices("*").privileges("READ").build() }, - null - ); - future = new PlainActionFuture<>(); - rolesStore.putRole(putRoleRequest, noFlsDlsRole, future); - assertTrue(future.actionGet()); } - public void testPutRoleWithRemotePrivsUnsupportedMinNodeVersion() { + public void testPutRoleWithRemotePrivsUnsupportedMinNodeVersion() throws IOException { + // Init for validation + new 
ReservedRolesStore(Set.of("superuser")); enum TEST_MODE { REMOTE_INDICES_PRIVS, REMOTE_CLUSTER_PRIVS, @@ -449,7 +500,6 @@ enum TEST_MODE { final ClusterService clusterService = mockClusterServiceWithMinNodeVersion(minTransportVersion); final XPackLicenseState licenseState = mock(XPackLicenseState.class); - final AtomicBoolean methodCalled = new AtomicBoolean(false); final SecuritySystemIndices systemIndices = new SecuritySystemIndices(clusterService.getSettings()); final FeatureService featureService = mock(FeatureService.class); @@ -462,21 +512,13 @@ enum TEST_MODE { licenseState, securityIndex, clusterService, - mock(FeatureService.class) - ) { - @Override - void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final ActionListener listener) { - if (methodCalled.compareAndSet(false, true)) { - listener.onResponse(true); - } else { - fail("method called more than once!"); - } - } - }; + mock(FeatureService.class), + mock(ReservedRoleNameChecker.class), + mock(NamedXContentRegistry.class) + ); // setup the roles store so the security index exists securityIndex.clusterChanged(new ClusterChangedEvent("source", getClusterStateWithSecurityIndex(), getEmptyClusterState())); - PutRoleRequest putRoleRequest = new PutRoleRequest(); RoleDescriptor remoteIndicesRole = new RoleDescriptor( "remote", null, @@ -492,7 +534,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final null ); PlainActionFuture future = new PlainActionFuture<>(); - rolesStore.putRole(putRoleRequest, remoteIndicesRole, future); + putRole(rolesStore, remoteIndicesRole, future); IllegalStateException e = expectThrows( IllegalStateException.class, String.format(Locale.ROOT, "expected IllegalStateException, but not thrown for mode [%s]", testMode), @@ -515,22 +557,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final public void testGetRoleWhenDisabled() throws Exception { final Settings settings = Settings.builder().put(NativeRolesStore.NATIVE_ROLES_ENABLED, "false").build(); - final Client client = mock(Client.class); - final ClusterService clusterService = mock(ClusterService.class); - final XPackLicenseState licenseState = mock(XPackLicenseState.class); - final SecuritySystemIndices systemIndices = new SecuritySystemIndices(settings); - final FeatureService featureService = mock(FeatureService.class); - systemIndices.init(client, featureService, clusterService); - final SecurityIndexManager securityIndex = systemIndices.getMainIndexManager(); - - final NativeRolesStore store = new NativeRolesStore( - settings, - client, - licenseState, - securityIndex, - clusterService, - mock(FeatureService.class) - ); + NativeRolesStore store = createRoleStoreForTest(settings); final PlainActionFuture future = new PlainActionFuture<>(); store.getRoleDescriptors(Set.of(randomAlphaOfLengthBetween(4, 12)), future); @@ -541,6 +568,210 @@ public void testGetRoleWhenDisabled() throws Exception { Mockito.verifyNoInteractions(client); } + public void testReservedRole() { + final NativeRolesStore store = createRoleStoreForTest(); + final String roleName = randomFrom(new ArrayList<>(ReservedRolesStore.names())); + + RoleDescriptor roleDescriptor = new RoleDescriptor( + roleName, + randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + new IndicesPrivileges[] { + IndicesPrivileges.builder().privileges("READ").indices("*").grantedFields("*").deniedFields("foo").build() }, + randomApplicationPrivileges(), + randomClusterPrivileges(), + 
generateRandomStringArray(5, randomIntBetween(2, 8), true, true), + randomRoleDescriptorMetadata(ESTestCase.randomBoolean()), + null, + randomRemoteIndicesPrivileges(1, 2), + null, + null, + randomAlphaOfLengthBetween(0, 20) + ); + ActionRequestValidationException exception = assertThrows(ActionRequestValidationException.class, () -> { + PlainActionFuture future = new PlainActionFuture<>(); + putRole(store, roleDescriptor, future); + future.actionGet(); + }); + + assertThat(exception.getMessage(), containsString("is reserved and may not be used")); + } + + public void testValidRole() throws IOException { + testValidRole(randomFrom("admin", "dept_a", "restricted")); + } + + public void testValidRoleWithInternalRoleName() throws IOException { + testValidRole(AuthenticationTestHelper.randomInternalRoleName()); + } + + private void testValidRole(String roleName) throws IOException { + final NativeRolesStore rolesStore = createRoleStoreForTest(); + + RoleDescriptor roleDescriptor = new RoleDescriptor( + roleName, + randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + new IndicesPrivileges[] { + IndicesPrivileges.builder().privileges("READ").indices("*").grantedFields("*").deniedFields("foo").build() }, + randomApplicationPrivileges(), + randomClusterPrivileges(), + generateRandomStringArray(5, randomIntBetween(2, 8), true, true), + null, + null, + null, + null, + null, + null + ); + + putRole(rolesStore, roleDescriptor, ActionListener.wrap(response -> fail(), exception -> fail())); + boolean indexCalled = false; + try { + verify(client, times(1)).index(any(IndexRequest.class), any()); + indexCalled = true; + } catch (AssertionError assertionError) { + // Index wasn't called + } + + boolean bulkCalled = false; + try { + verify(client, times(1)).bulk(any(BulkRequest.class), any()); + bulkCalled = true; + } catch (AssertionError assertionError) { + // bulk wasn't called + } + + assertTrue(bulkCalled || indexCalled); + } + + public void testCreationOfRoleWithMalformedQueryJsonFails() throws IOException { + final NativeRolesStore rolesStore = createRoleStoreForTest(); + + String[] malformedQueryJson = new String[] { + "{ \"match_all\": { \"unknown_field\": \"\" } }", + "{ malformed JSON }", + "{ \"unknown\": {\"\"} }", + "{}" }; + + BytesReference query = new BytesArray(randomFrom(malformedQueryJson)); + + RoleDescriptor roleDescriptor = new RoleDescriptor( + "test", + randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + new IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices("idx1") + .privileges(new String[] { "read" }) + .query(query) + .allowRestrictedIndices(randomBoolean()) + .build() }, + randomApplicationPrivileges(), + randomClusterPrivileges(), + null, + null, + null, + null, + null, + null, + null + ); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + + putRole(rolesStore, roleDescriptor, ActionListener.wrap(responseRef::set, throwableRef::set)); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), is(notNullValue())); + Throwable t = throwableRef.get(); + assertThat(t, instanceOf(ElasticsearchParseException.class)); + assertThat( + t.getMessage(), + containsString( + "failed to parse field 'query' for indices [" + + Strings.arrayToCommaDelimitedString(new String[] { "idx1" }) + + "] at index privilege [0] of role descriptor" + ) + ); + } + + public void testCreationOfRoleWithUnsupportedQueryFails() throws 
IOException { + final NativeRolesStore rolesStore = createRoleStoreForTest(); + + String hasChildQuery = "{ \"has_child\": { \"type\": \"child\", \"query\": { \"match_all\": {} } } }"; + String hasParentQuery = "{ \"has_parent\": { \"parent_type\": \"parent\", \"query\": { \"match_all\": {} } } }"; + + BytesReference query = new BytesArray(randomFrom(hasChildQuery, hasParentQuery)); + + RoleDescriptor roleDescriptor = new RoleDescriptor( + "test", + randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + new IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices("idx1") + .privileges(new String[] { "read" }) + .query(query) + .allowRestrictedIndices(randomBoolean()) + .build() }, + randomApplicationPrivileges(), + randomClusterPrivileges(), + null, + null, + null, + null, + null, + null, + null + ); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + putRole(rolesStore, roleDescriptor, ActionListener.wrap(responseRef::set, throwableRef::set)); + + assertThat(responseRef.get(), is(nullValue())); + assertThat(throwableRef.get(), is(notNullValue())); + Throwable t = throwableRef.get(); + assertThat(t, instanceOf(ElasticsearchParseException.class)); + assertThat( + t.getMessage(), + containsString( + "failed to parse field 'query' for indices [" + + Strings.arrayToCommaDelimitedString(new String[] { "idx1" }) + + "] at index privilege [0] of role descriptor" + ) + ); + } + + public void testManyValidRoles() throws IOException { + final NativeRolesStore rolesStore = createRoleStoreForTest(); + List roleNames = List.of("test", "admin", "123"); + + List roleDescriptors = roleNames.stream() + .map( + roleName -> new RoleDescriptor( + roleName, + randomSubsetOf(ClusterPrivilegeResolver.names()).toArray(String[]::new), + new IndicesPrivileges[] { + IndicesPrivileges.builder().privileges("READ").indices("*").grantedFields("*").deniedFields("foo").build() }, + randomApplicationPrivileges(), + randomClusterPrivileges(), + generateRandomStringArray(5, randomIntBetween(2, 8), true, true), + null, + null, + null, + null, + null, + null + ) + ) + .toList(); + + AtomicReference response = new AtomicReference<>(); + AtomicReference exception = new AtomicReference<>(); + rolesStore.putRoles(WriteRequest.RefreshPolicy.IMMEDIATE, roleDescriptors, ActionListener.wrap(response::set, exception::set)); + assertNull(exception.get()); + verify(client, times(1)).bulk(any(BulkRequest.class), any()); + } + private ClusterService mockClusterServiceWithMinNodeVersion(TransportVersion transportVersion) { final ClusterService clusterService = mock(ClusterService.class, Mockito.RETURNS_DEEP_STUBS); when(clusterService.state().getMinTransportVersion()).thenReturn(transportVersion); @@ -552,8 +783,14 @@ private ClusterState getClusterStateWithSecurityIndex() { final boolean withAlias = randomBoolean(); final String securityIndexName = SECURITY_MAIN_ALIAS + (withAlias ? 
"-" + randomAlphaOfLength(5) : ""); + Settings.Builder settingsBuilder = indexSettings(IndexVersion.current(), 1, 0); + settingsBuilder.put(INDEX_FORMAT_SETTING.getKey(), SecuritySystemIndices.INTERNAL_MAIN_INDEX_FORMAT); + settingsBuilder.put(VERSION_META_KEY, 1); + MappingMetadata mappingMetadata = mock(MappingMetadata.class); + when(mappingMetadata.sourceAsMap()).thenReturn(Map.of("_meta", Map.of(VERSION_META_KEY, 1))); + when(mappingMetadata.getSha256()).thenReturn("test"); Metadata metadata = Metadata.builder() - .put(IndexMetadata.builder(securityIndexName).settings(indexSettings(IndexVersion.current(), 1, 0))) + .put(IndexMetadata.builder(securityIndexName).putMapping(mappingMetadata).settings(settingsBuilder)) .build(); if (withAlias) { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml new file mode 100644 index 0000000000000..72a240ab92695 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml @@ -0,0 +1,83 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow + - do: + security.put_user: + username: "joe" + body: > + { + "password": "s3krit-password", + "roles" : [ "admin_role" ] + } + +--- +teardown: + - do: + security.delete_user: + username: "joe" + ignore: 404 + - do: + security.delete_role: + name: "admin_role" + ignore: 404 + - do: + security.delete_role: + name: "role_with_description" + ignore: 404 +--- +"Test bulk put roles api": + - do: + security.bulk_put_role: + body: > + { + "roles": { + "admin_role": { + "cluster": [ + "all" + ], + "metadata": { + "key1": "val1", + "key2": "val2" + }, + "indices": [ + { + "names": "*", + "privileges": [ + "all" + ] + } + ] + }, + "role_with_description": { + "description": "Allows all security-related operations such as CRUD operations on users and roles and cache clearing.", + "cluster": [ + "manage_security" + ] + } + } + } + - match: { created: ["admin_role", "role_with_description"] } + + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.get_role: + name: "admin_role" + - match: { admin_role.cluster.0: "all" } + - match: { admin_role.metadata.key1: "val1" } + - match: { admin_role.metadata.key2: "val2" } + - match: { admin_role.indices.0.names.0: "*" } + - match: { admin_role.indices.0.privileges.0: "all" } + + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.get_role: + name: "role_with_description" + - match: { role_with_description.cluster.0: "manage_security" } + - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." 
} From 0eb3ee235736504324d2467c9ad6a3db4da008a0 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Tue, 2 Jul 2024 10:50:50 -0400 Subject: [PATCH 112/216] fix compile error related to new PARTIAL_AGG data type (#110379) --- .../TopDoubleAggregatorFunction.java | 16 ++++++------- .../TopDoubleAggregatorFunctionSupplier.java | 3 +-- .../TopDoubleGroupingAggregatorFunction.java | 16 ++++++------- .../TopFloatAggregatorFunction.java | 16 ++++++------- .../TopFloatAggregatorFunctionSupplier.java | 3 +-- .../TopFloatGroupingAggregatorFunction.java | 16 ++++++------- .../aggregation/TopIntAggregatorFunction.java | 18 +++++++------- .../TopIntAggregatorFunctionSupplier.java | 3 +-- .../TopIntGroupingAggregatorFunction.java | 16 ++++++------- .../TopLongAggregatorFunction.java | 16 ++++++------- .../TopLongAggregatorFunctionSupplier.java | 3 +-- .../TopLongGroupingAggregatorFunction.java | 16 ++++++------- .../xpack/esql/action/TimeSeriesIT.java | 24 +++++++++---------- .../xpack/esql/action/PositionToXContent.java | 4 ++-- .../xpack/esql/action/ResponseValueUtils.java | 2 +- 15 files changed, 84 insertions(+), 88 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java index 3d658294c154f..8549da42c0d85 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java @@ -22,7 +22,7 @@ */ public final class TopDoubleAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.DOUBLE) ); + new IntermediateStateDesc("top", ElementType.DOUBLE) ); private final DriverContext driverContext; @@ -35,7 +35,7 @@ public final class TopDoubleAggregatorFunction implements AggregatorFunction { private final boolean ascending; public TopDoubleAggregatorFunction(DriverContext driverContext, List channels, - TopDoubleAggregator.SingleState state, int limit, boolean ascending) { + TopDoubleAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -44,7 +44,7 @@ public TopDoubleAggregatorFunction(DriverContext driverContext, List ch } public static TopDoubleAggregatorFunction create(DriverContext driverContext, - List channels, int limit, boolean ascending) { + List channels, int limit, boolean ascending) { return new TopDoubleAggregatorFunction(driverContext, channels, TopDoubleAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } @@ -91,13 +91,13 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - DoubleBlock topList = (DoubleBlock) topListUncast; - assert topList.getPositionCount() == 1; - TopDoubleAggregator.combineIntermediate(state, topList); + DoubleBlock top = (DoubleBlock) topUncast; + assert top.getPositionCount() == 1; + 
TopDoubleAggregator.combineIntermediate(state, top); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java index b781af87ddc82..36a8763b4a870 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunctionSupplier.java @@ -21,8 +21,7 @@ public final class TopDoubleAggregatorFunctionSupplier implements AggregatorFunc private final boolean ascending; - public TopDoubleAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public TopDoubleAggregatorFunctionSupplier(List channels, int limit, boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java index 493e76d23a85f..c54dce5715846 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ */ public final class TopDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.DOUBLE) ); + new IntermediateStateDesc("top", ElementType.DOUBLE) ); private final TopDoubleAggregator.GroupingState state; @@ -37,8 +37,8 @@ public final class TopDoubleGroupingAggregatorFunction implements GroupingAggreg private final boolean ascending; public TopDoubleGroupingAggregatorFunction(List channels, - TopDoubleAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + TopDoubleAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -47,7 +47,7 @@ public TopDoubleGroupingAggregatorFunction(List channels, } public static TopDoubleGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { + DriverContext driverContext, int limit, boolean ascending) { return new TopDoubleGroupingAggregatorFunction(channels, TopDoubleAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } @@ -154,14 +154,14 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - DoubleBlock topList = (DoubleBlock) topListUncast; + DoubleBlock top = (DoubleBlock) topUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); 
groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopDoubleAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopDoubleAggregator.combineIntermediate(state, groupId, top, groupPosition + positionOffset); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java index 674b534667863..40ac1432caee8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java @@ -22,7 +22,7 @@ */ public final class TopFloatAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.FLOAT) ); + new IntermediateStateDesc("top", ElementType.FLOAT) ); private final DriverContext driverContext; @@ -35,7 +35,7 @@ public final class TopFloatAggregatorFunction implements AggregatorFunction { private final boolean ascending; public TopFloatAggregatorFunction(DriverContext driverContext, List channels, - TopFloatAggregator.SingleState state, int limit, boolean ascending) { + TopFloatAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -44,7 +44,7 @@ public TopFloatAggregatorFunction(DriverContext driverContext, List cha } public static TopFloatAggregatorFunction create(DriverContext driverContext, - List channels, int limit, boolean ascending) { + List channels, int limit, boolean ascending) { return new TopFloatAggregatorFunction(driverContext, channels, TopFloatAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } @@ -91,13 +91,13 @@ private void addRawBlock(FloatBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - FloatBlock topList = (FloatBlock) topListUncast; - assert topList.getPositionCount() == 1; - TopFloatAggregator.combineIntermediate(state, topList); + FloatBlock top = (FloatBlock) topUncast; + assert top.getPositionCount() == 1; + TopFloatAggregator.combineIntermediate(state, top); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java index f40bbce1d73f6..e01df8329a315 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunctionSupplier.java @@ -21,8 +21,7 @@ public final class TopFloatAggregatorFunctionSupplier implements AggregatorFunct private final boolean ascending; - public TopFloatAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public 
TopFloatAggregatorFunctionSupplier(List channels, int limit, boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java index 2555c0aeafec5..4c00f4d2c237d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ */ public final class TopFloatGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.FLOAT) ); + new IntermediateStateDesc("top", ElementType.FLOAT) ); private final TopFloatAggregator.GroupingState state; @@ -37,8 +37,8 @@ public final class TopFloatGroupingAggregatorFunction implements GroupingAggrega private final boolean ascending; public TopFloatGroupingAggregatorFunction(List channels, - TopFloatAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + TopFloatAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -47,7 +47,7 @@ public TopFloatGroupingAggregatorFunction(List channels, } public static TopFloatGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { + DriverContext driverContext, int limit, boolean ascending) { return new TopFloatGroupingAggregatorFunction(channels, TopFloatAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } @@ -154,14 +154,14 @@ private void addRawInput(int positionOffset, IntBlock groups, FloatVector values public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - FloatBlock topList = (FloatBlock) topListUncast; + FloatBlock top = (FloatBlock) topUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopFloatAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopFloatAggregator.combineIntermediate(state, groupId, top, groupPosition + positionOffset); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java index 94163e4915944..f6e858b69a639 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java @@ -22,7 +22,7 @@ */ public final class TopIntAggregatorFunction implements AggregatorFunction { 
private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.INT) ); + new IntermediateStateDesc("top", ElementType.INT) ); private final DriverContext driverContext; @@ -35,7 +35,7 @@ public final class TopIntAggregatorFunction implements AggregatorFunction { private final boolean ascending; public TopIntAggregatorFunction(DriverContext driverContext, List channels, - TopIntAggregator.SingleState state, int limit, boolean ascending) { + TopIntAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -43,8 +43,8 @@ public TopIntAggregatorFunction(DriverContext driverContext, List chann this.ascending = ascending; } - public static TopIntAggregatorFunction create(DriverContext driverContext, - List channels, int limit, boolean ascending) { + public static TopIntAggregatorFunction create(DriverContext driverContext, List channels, + int limit, boolean ascending) { return new TopIntAggregatorFunction(driverContext, channels, TopIntAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } @@ -91,13 +91,13 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - IntBlock topList = (IntBlock) topListUncast; - assert topList.getPositionCount() == 1; - TopIntAggregator.combineIntermediate(state, topList); + IntBlock top = (IntBlock) topUncast; + assert top.getPositionCount() == 1; + TopIntAggregator.combineIntermediate(state, top); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java index df6502350c06c..4481f2d5afaa8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunctionSupplier.java @@ -21,8 +21,7 @@ public final class TopIntAggregatorFunctionSupplier implements AggregatorFunctio private final boolean ascending; - public TopIntAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public TopIntAggregatorFunctionSupplier(List channels, int limit, boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java index dbbc5ea6df650..37384238b7297 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntGroupingAggregatorFunction.java @@ -22,7 +22,7 @@ */ public final class TopIntGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List 
INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.INT) ); + new IntermediateStateDesc("top", ElementType.INT) ); private final TopIntAggregator.GroupingState state; @@ -35,8 +35,8 @@ public final class TopIntGroupingAggregatorFunction implements GroupingAggregato private final boolean ascending; public TopIntGroupingAggregatorFunction(List channels, - TopIntAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + TopIntAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -45,7 +45,7 @@ public TopIntGroupingAggregatorFunction(List channels, } public static TopIntGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { + DriverContext driverContext, int limit, boolean ascending) { return new TopIntGroupingAggregatorFunction(channels, TopIntAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } @@ -152,14 +152,14 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - IntBlock topList = (IntBlock) topListUncast; + IntBlock top = (IntBlock) topUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopIntAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopIntAggregator.combineIntermediate(state, groupId, top, groupPosition + positionOffset); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java index 1887e958344ee..c355e401478d8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java @@ -22,7 +22,7 @@ */ public final class TopLongAggregatorFunction implements AggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.LONG) ); + new IntermediateStateDesc("top", ElementType.LONG) ); private final DriverContext driverContext; @@ -35,7 +35,7 @@ public final class TopLongAggregatorFunction implements AggregatorFunction { private final boolean ascending; public TopLongAggregatorFunction(DriverContext driverContext, List channels, - TopLongAggregator.SingleState state, int limit, boolean ascending) { + TopLongAggregator.SingleState state, int limit, boolean ascending) { this.driverContext = driverContext; this.channels = channels; this.state = state; @@ -44,7 +44,7 @@ public TopLongAggregatorFunction(DriverContext driverContext, List chan } public static TopLongAggregatorFunction create(DriverContext driverContext, - List channels, int limit, boolean ascending) { + List channels, int limit, 
boolean ascending) { return new TopLongAggregatorFunction(driverContext, channels, TopLongAggregator.initSingle(driverContext.bigArrays(), limit, ascending), limit, ascending); } @@ -91,13 +91,13 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - LongBlock topList = (LongBlock) topListUncast; - assert topList.getPositionCount() == 1; - TopLongAggregator.combineIntermediate(state, topList); + LongBlock top = (LongBlock) topUncast; + assert top.getPositionCount() == 1; + TopLongAggregator.combineIntermediate(state, top); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java index 3a41143be46ad..1a39c7b5580ec 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionSupplier.java @@ -21,8 +21,7 @@ public final class TopLongAggregatorFunctionSupplier implements AggregatorFuncti private final boolean ascending; - public TopLongAggregatorFunctionSupplier(List channels, int limit, - boolean ascending) { + public TopLongAggregatorFunctionSupplier(List channels, int limit, boolean ascending) { this.channels = channels; this.limit = limit; this.ascending = ascending; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java index 64564d0c49756..7b199b2a81389 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongGroupingAggregatorFunction.java @@ -24,7 +24,7 @@ */ public final class TopLongGroupingAggregatorFunction implements GroupingAggregatorFunction { private static final List INTERMEDIATE_STATE_DESC = List.of( - new IntermediateStateDesc("topList", ElementType.LONG) ); + new IntermediateStateDesc("top", ElementType.LONG) ); private final TopLongAggregator.GroupingState state; @@ -37,8 +37,8 @@ public final class TopLongGroupingAggregatorFunction implements GroupingAggregat private final boolean ascending; public TopLongGroupingAggregatorFunction(List channels, - TopLongAggregator.GroupingState state, DriverContext driverContext, int limit, - boolean ascending) { + TopLongAggregator.GroupingState state, DriverContext driverContext, int limit, + boolean ascending) { this.channels = channels; this.state = state; this.driverContext = driverContext; @@ -47,7 +47,7 @@ public TopLongGroupingAggregatorFunction(List channels, } public static TopLongGroupingAggregatorFunction create(List channels, - DriverContext driverContext, int limit, boolean ascending) { + DriverContext driverContext, int limit, boolean ascending) { return new 
TopLongGroupingAggregatorFunction(channels, TopLongAggregator.initGrouping(driverContext.bigArrays(), limit, ascending), driverContext, limit, ascending); } @@ -154,14 +154,14 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - Block topListUncast = page.getBlock(channels.get(0)); - if (topListUncast.areAllValuesNull()) { + Block topUncast = page.getBlock(channels.get(0)); + if (topUncast.areAllValuesNull()) { return; } - LongBlock topList = (LongBlock) topListUncast; + LongBlock top = (LongBlock) topUncast; for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); - TopLongAggregator.combineIntermediate(state, groupId, topList, groupPosition + positionOffset); + TopLongAggregator.combineIntermediate(state, groupId, top, groupPosition + positionOffset); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index 02cecc63dbd0f..93f8c75ddb088 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -258,10 +258,10 @@ record RateKey(String cluster, String host) { resp.columns(), equalTo( List.of( - new ColumnInfo("max(rate(request_count))", "double"), - new ColumnInfo("min(rate(request_count))", "double"), - new ColumnInfo("min(cpu)", "double"), - new ColumnInfo("max(cpu)", "double") + new ColumnInfoImpl("max(rate(request_count))", "double"), + new ColumnInfoImpl("min(rate(request_count))", "double"), + new ColumnInfoImpl("min(cpu)", "double"), + new ColumnInfoImpl("max(cpu)", "double") ) ) ); @@ -629,10 +629,10 @@ METRICS hosts sum(rate(request_count)), max(cpu) BY ts=bucket(@timestamp, 1 minu resp.columns(), equalTo( List.of( - new ColumnInfo("sum(rate(request_count))", "double"), - new ColumnInfo("max(cpu)", "double"), - new ColumnInfo("ts", "date"), - new ColumnInfo("cluster", "keyword") + new ColumnInfoImpl("sum(rate(request_count))", "double"), + new ColumnInfoImpl("max(cpu)", "double"), + new ColumnInfoImpl("ts", "date"), + new ColumnInfoImpl("cluster", "keyword") ) ) ); @@ -666,10 +666,10 @@ METRICS hosts sum(rate(request_count)), avg(cpu) BY ts=bucket(@timestamp, 1 minu resp.columns(), equalTo( List.of( - new ColumnInfo("sum(rate(request_count))", "double"), - new ColumnInfo("avg(cpu)", "double"), - new ColumnInfo("ts", "date"), - new ColumnInfo("cluster", "keyword") + new ColumnInfoImpl("sum(rate(request_count))", "double"), + new ColumnInfoImpl("avg(cpu)", "double"), + new ColumnInfoImpl("ts", "date"), + new ColumnInfoImpl("cluster", "keyword") ) ) ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 2cdbd9f5f93f1..0bc1eb46abefe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -165,8 +165,8 @@ protected XContentBuilder 
valueToXContent(XContentBuilder builder, ToXContent.Pa } } }; - case DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, TSID_DATA_TYPE, SHORT, BYTE, OBJECT, NESTED, FLOAT, HALF_FLOAT, SCALED_FLOAT -> - throw new IllegalArgumentException("can't convert values of type [" + columnInfo.type() + "]"); + case DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, TSID_DATA_TYPE, SHORT, BYTE, OBJECT, NESTED, FLOAT, HALF_FLOAT, SCALED_FLOAT, + PARTIAL_AGG -> throw new IllegalArgumentException("can't convert values of type [" + columnInfo.type() + "]"); }; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index d99da4500a3b0..290a816275a29 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -152,7 +152,7 @@ private static Object valueAt(DataType dataType, Block block, int offset, BytesR } } case SHORT, BYTE, FLOAT, HALF_FLOAT, SCALED_FLOAT, OBJECT, NESTED, DATE_PERIOD, TIME_DURATION, DOC_DATA_TYPE, TSID_DATA_TYPE, - NULL -> throw EsqlIllegalArgumentException.illegalDataType(dataType); + NULL, PARTIAL_AGG -> throw EsqlIllegalArgumentException.illegalDataType(dataType); }; } From cdbe092d909a31d8bca88f2d5f4ca161f5afc403 Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Tue, 2 Jul 2024 17:04:57 +0200 Subject: [PATCH 113/216] Update docs now that keyword dimensions support ignore_above (#110385) This is a follow-up from https://github.com/elastic/elasticsearch/pull/110337 --- docs/reference/mapping/types/keyword.asciidoc | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/reference/mapping/types/keyword.asciidoc b/docs/reference/mapping/types/keyword.asciidoc index a6f41a38f559c..59d307c4df0ad 100644 --- a/docs/reference/mapping/types/keyword.asciidoc +++ b/docs/reference/mapping/types/keyword.asciidoc @@ -166,7 +166,6 @@ Dimension fields have the following constraints: * Field values cannot be an <>. // end::dimension[] * Dimension values are used to identify a document’s time series. If dimension values are altered in any way during indexing, the document will be stored as belonging to different from intended time series. As a result there are additional constraints: -** <> mapping parameter isn't supported. ** The field cannot use a <>. -- From 6b64389510d4be4be486698a7b935b6c1efa36ff Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 2 Jul 2024 16:09:28 +0100 Subject: [PATCH 114/216] [ML] Sentence Chunker (#110334) The Sentence chunker splits long text into smaller chunks on sentence boundaries. 
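For reviewers, a rough sketch of the greedy sentence-packing strategy (illustrative only; the real SentenceBoundaryChunker counts words with an ICU word BreakIterator and splits over-long sentences on word boundaries with overlap, both of which are simplified away here):

```
import com.ibm.icu.text.BreakIterator;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

class SentenceChunkingSketch {
    // Greedily packs whole sentences into chunks of at most maxWords words.
    // A single sentence longer than maxWords becomes an oversized chunk in
    // this sketch; the production code instead falls back to word-boundary
    // splitting with overlap.
    static List<String> chunk(String input, int maxWords) {
        BreakIterator sentences = BreakIterator.getSentenceInstance(Locale.ROOT);
        sentences.setText(input);
        List<String> chunks = new ArrayList<>();
        StringBuilder current = new StringBuilder();
        int wordsInChunk = 0;
        int start = sentences.first();
        for (int end = sentences.next(); end != BreakIterator.DONE; start = end, end = sentences.next()) {
            String sentence = input.substring(start, end);
            // Simplified whitespace word count (the PR uses an ICU word iterator).
            int sentenceWords = sentence.isBlank() ? 0 : sentence.trim().split("\\s+").length;
            if (wordsInChunk > 0 && wordsInChunk + sentenceWords > maxWords) {
                // Adding this sentence would overflow the chunk: emit and start anew.
                chunks.add(current.toString());
                current.setLength(0);
                wordsInChunk = 0;
            }
            current.append(sentence);
            wordsInChunk += sentenceWords;
        }
        if (current.length() > 0) {
            chunks.add(current.toString());
        }
        return chunks;
    }
}
```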
--- docs/changelog/110334.yaml | 5 + .../EmbeddingRequestChunker.java | 2 +- .../chunking/SentenceBoundaryChunker.java | 138 ++++++++++++++ .../WordBoundaryChunker.java | 39 +++- .../azureaistudio/AzureAiStudioService.java | 2 +- .../azureopenai/AzureOpenAiService.java | 2 +- .../services/cohere/CohereService.java | 2 +- .../googleaistudio/GoogleAiStudioService.java | 2 +- .../googlevertexai/GoogleVertexAiService.java | 2 +- .../huggingface/HuggingFaceService.java | 2 +- .../services/mistral/MistralService.java | 2 +- .../services/openai/OpenAiService.java | 2 +- .../EmbeddingRequestChunkerTests.java | 2 +- .../SentenceBoundaryChunkerTests.java | 173 ++++++++++++++++++ .../WordBoundaryChunkerTests.java | 36 +++- 15 files changed, 384 insertions(+), 27 deletions(-) create mode 100644 docs/changelog/110334.yaml rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/{common => chunking}/EmbeddingRequestChunker.java (99%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/{common => chunking}/WordBoundaryChunker.java (73%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/{common => chunking}/EmbeddingRequestChunkerTests.java (99%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/{common => chunking}/WordBoundaryChunkerTests.java (87%) diff --git a/docs/changelog/110334.yaml b/docs/changelog/110334.yaml new file mode 100644 index 0000000000000..f83ac04ded773 --- /dev/null +++ b/docs/changelog/110334.yaml @@ -0,0 +1,5 @@ +pr: 110334 +summary: Sentence Chunker +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java similarity index 99% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java index 0e8928c3a2391..7587dbf8ca95b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.inference.common; +package org.elasticsearch.xpack.inference.chunking; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java new file mode 100644 index 0000000000000..258a127dac8ab --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.chunking; + +import com.ibm.icu.text.BreakIterator; + +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + +/** + * Split text into chunks aligned on sentence boundaries. + * The maximum chunk size is measured in words and controlled + * by {@code maxNumberWordsPerChunk}. Sentences are combined + * greedily until adding the next sentence would exceed + * {@code maxNumberWordsPerChunk}, at which point a new chunk + * is created. If an individual sentence is longer than + * {@code maxNumberWordsPerChunk} it is split on word boundary with + * overlap. + */ +public class SentenceBoundaryChunker { + + private final BreakIterator sentenceIterator; + private final BreakIterator wordIterator; + + public SentenceBoundaryChunker() { + sentenceIterator = BreakIterator.getSentenceInstance(Locale.ROOT); + wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + } + + /** + * Break the input text into small chunks on sentence boundaries. + * + * @param input Text to chunk + * @param maxNumberWordsPerChunk Maximum size of the chunk + * @return The input text chunked + */ + public List chunk(String input, int maxNumberWordsPerChunk) { + var chunks = new ArrayList(); + + sentenceIterator.setText(input); + wordIterator.setText(input); + + int chunkStart = 0; + int chunkEnd = 0; + int sentenceStart = 0; + int chunkWordCount = 0; + + int boundary = sentenceIterator.next(); + + while (boundary != BreakIterator.DONE) { + int sentenceEnd = sentenceIterator.current(); + int countWordsInSentence = countWords(sentenceStart, sentenceEnd); + + if (chunkWordCount + countWordsInSentence > maxNumberWordsPerChunk) { + // over the max chunk size, roll back to the last sentence + + if (chunkWordCount > 0) { + // add a new chunk containing all the input up to this sentence + chunks.add(input.substring(chunkStart, chunkEnd)); + chunkStart = chunkEnd; + chunkWordCount = countWordsInSentence; // the next chunk will contain this sentence + } + + if (countWordsInSentence > maxNumberWordsPerChunk) { + // This sentence is bigger than the max chunk size. + // Split the sentence on the word boundary + var sentenceSplits = splitLongSentence( + input.substring(chunkStart, sentenceEnd), + maxNumberWordsPerChunk, + overlapForChunkSize(maxNumberWordsPerChunk) + ); + + int i = 0; + for (; i < sentenceSplits.size() - 1; i++) { + // Because the substring was passed to splitLongSentence() + // the returned positions need to be offset by chunkStart + chunks.add(input.substring(chunkStart + sentenceSplits.get(i).start(), chunkStart + sentenceSplits.get(i).end())); + } + // The final split is partially filled. + // Set the next chunk start to the beginning of the + // final split of the long sentence. 
+ chunkStart = chunkStart + sentenceSplits.get(i).start(); // start pos needs to be offset by chunkStart + chunkWordCount = sentenceSplits.get(i).wordCount(); + } + } else { + chunkWordCount += countWordsInSentence; + } + + sentenceStart = sentenceEnd; + chunkEnd = sentenceEnd; + + boundary = sentenceIterator.next(); + } + + if (chunkWordCount > 0) { + chunks.add(input.substring(chunkStart)); + } + + return chunks; + } + + static List splitLongSentence(String text, int maxNumberOfWords, int overlap) { + return new WordBoundaryChunker().chunkPositions(text, maxNumberOfWords, overlap); + } + + private int countWords(int start, int end) { + return countWords(start, end, this.wordIterator); + } + + // Exposed for testing. wordIterator should have had + // setText() applied before using this function. + static int countWords(int start, int end, BreakIterator wordIterator) { + assert start < end; + wordIterator.preceding(start); // start of the current word + + int boundary = wordIterator.current(); + int wordCount = 0; + while (boundary != BreakIterator.DONE && boundary <= end) { + int wordStatus = wordIterator.getRuleStatus(); + if (wordStatus != BreakIterator.WORD_NONE) { + wordCount++; + } + boundary = wordIterator.next(); + } + + return wordCount; + } + + private static int overlapForChunkSize(int chunkSize) { + return (chunkSize - 1) / 2; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunker.java similarity index 73% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunker.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunker.java index d3bb9154fd426..4233f917f8f80 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunker.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.inference.common; +package org.elasticsearch.xpack.inference.chunking; import com.ibm.icu.text.BreakIterator; @@ -32,6 +32,8 @@ public WordBoundaryChunker() { wordIterator = BreakIterator.getWordInstance(Locale.ROOT); } + record ChunkPosition(int start, int end, int wordCount) {} + /** * Break the input text into small chunks as dictated * by the chunking parameters @@ -42,6 +44,29 @@ public WordBoundaryChunker() { * @return List of chunked text */ public List chunk(String input, int chunkSize, int overlap) { + + if (input.isEmpty()) { + return List.of(""); + } + + var chunkPositions = chunkPositions(input, chunkSize, overlap); + var chunks = new ArrayList(chunkPositions.size()); + for (var pos : chunkPositions) { + chunks.add(input.substring(pos.start, pos.end)); + } + return chunks; + } + + /** + * Chunk using the same strategy as {@link #chunk(String, int, int)} + * but return the chunk start and end offsets in the {@code input} string + * @param input Text to chunk + * @param chunkSize The number of words in each chunk + * @param overlap The number of words to overlap each chunk. + * Can be 0 but must be non-negative. 
+ * @return List of chunked text positions + */ + List chunkPositions(String input, int chunkSize, int overlap) { if (overlap > 0 && overlap > chunkSize / 2) { throw new IllegalArgumentException( "Invalid chunking parameters, overlap [" @@ -59,10 +84,10 @@ public List chunk(String input, int chunkSize, int overlap) { } if (input.isEmpty()) { - return List.of(""); + return List.of(); } - var chunks = new ArrayList(); + var chunkPositions = new ArrayList(); // This position in the chunk is where the next overlapping chunk will start final int chunkSizeLessOverlap = chunkSize - overlap; @@ -81,7 +106,7 @@ public List chunk(String input, int chunkSize, int overlap) { wordsSinceStartWindowWasMarked++; if (wordsInChunkCountIncludingOverlap >= chunkSize) { - chunks.add(input.substring(windowStart, boundary)); + chunkPositions.add(new ChunkPosition(windowStart, boundary, wordsInChunkCountIncludingOverlap)); wordsInChunkCountIncludingOverlap = overlap; if (overlap == 0) { @@ -102,10 +127,10 @@ public List chunk(String input, int chunkSize, int overlap) { // Get the last chunk that was shorter than the required chunk size // if it ends on a boundary than the count should equal overlap in which case // we can ignore it, unless this is the first chunk in which case we want to add it - if (wordsInChunkCountIncludingOverlap > overlap || chunks.isEmpty()) { - chunks.add(input.substring(windowStart)); + if (wordsInChunkCountIncludingOverlap > overlap || chunkPositions.isEmpty()) { + chunkPositions.add(new ChunkPosition(windowStart, input.length(), wordsInChunkCountIncludingOverlap)); } - return chunks; + return chunkPositions; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index 65c3db4093249..c4ef5faf8e667 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -24,7 +24,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.azureaistudio.AzureAiStudioActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 5c25ae62517dd..3facb78864831 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import 
org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index dec46817be7be..2feb1428c4508 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -23,7 +23,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index 19d0a5fe0a317..911ccd33690d4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -23,7 +23,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.googleaistudio.GoogleAiStudioActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index 9b2ef5847322c..f7a8055a90abb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -23,7 +23,7 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import 
org.elasticsearch.xpack.inference.external.action.googlevertexai.GoogleVertexAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index 6e311c39c787a..7a591f094982d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -20,7 +20,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index bcef31031cb0c..d85b2b095ba2c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -22,7 +22,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.mistral.MistralActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 8e25d4a8936ab..d2264ce5cd881 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -24,7 +24,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.common.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java similarity index 99% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java index facd8dfd9f3b1..cb89846b197fc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.inference.common; +package org.elasticsearch.xpack.inference.chunking; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkedInferenceServiceResults; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java new file mode 100644 index 0000000000000..5bf282a07067a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.chunking; + +import com.ibm.icu.text.BreakIterator; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.Arrays; +import java.util.Locale; + +import static org.elasticsearch.xpack.inference.chunking.WordBoundaryChunkerTests.TEST_TEXT; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class SentenceBoundaryChunkerTests extends ESTestCase { + + public void testChunkSplitLargeChunkSizes() { + for (int maxWordsPerChunk : new int[] { 100, 200 }) { + var chunker = new SentenceBoundaryChunker(); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk); + + int numChunks = expectedNumberOfChunks(sentenceSizes(TEST_TEXT), maxWordsPerChunk); + assertThat("words per chunk " + maxWordsPerChunk, chunks, hasSize(numChunks)); + + for (var chunk : chunks) { + assertTrue(Character.isUpperCase(chunk.charAt(0))); + var trailingWhiteSpaceRemoved = chunk.strip(); + var lastChar = trailingWhiteSpaceRemoved.charAt(trailingWhiteSpaceRemoved.length() - 1); + assertThat(lastChar, Matchers.is('.')); + } + } + } + + public void testChunk_ChunkSizeLargerThanText() { + int maxWordsPerChunk = 500; + var chunker = new SentenceBoundaryChunker(); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk); + + assertEquals(chunks.get(0), TEST_TEXT); + } + + public void testChunkSplit_SentencesLongerThanChunkSize() { + var chunkSizes = new int[] { 10, 30, 50 }; + var expectedNumberOFChunks = new int[] { 21, 7, 4 }; + + for (int i = 0; i < chunkSizes.length; i++) { + int maxWordsPerChunk = chunkSizes[i]; + var chunker = new SentenceBoundaryChunker(); + var chunks = chunker.chunk(TEST_TEXT, maxWordsPerChunk); + + assertThat("words per chunk " + maxWordsPerChunk, chunks, 
hasSize(expectedNumberOFChunks[i])); + for (var chunk : chunks) { + // count whitespaced words + // strip out the '=' signs as they are not counted as words by ICU + var trimmed = chunk.trim().replace("=", ""); + // split by hyphen or whitespace to match the way + // the ICU break iterator counts words + var split = trimmed.split("[\\s\\-]+"); + int numWhiteSpacedWords = (int) Arrays.stream(split).filter(s -> s.isEmpty() == false).count(); + if (chunk.trim().endsWith(".")) { + // End of sentence, may be less than maxWordsPerChunk + assertThat(Arrays.toString(split), numWhiteSpacedWords, lessThanOrEqualTo(maxWordsPerChunk)); + } else { + // splitting inside a sentence so should have max words + assertEquals(Arrays.toString(split), maxWordsPerChunk, numWhiteSpacedWords); + } + } + } + } + + public void testCountWords() { + // Test word count matches the whitespace separated word count. + var splitByWhiteSpaceSentenceSizes = sentenceSizes(TEST_TEXT); + + var sentenceIterator = BreakIterator.getSentenceInstance(Locale.ROOT); + sentenceIterator.setText(TEST_TEXT); + + var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + wordIterator.setText(TEST_TEXT); + + int start = 0; + int end = sentenceIterator.next(); + assertEquals(splitByWhiteSpaceSentenceSizes[0], SentenceBoundaryChunker.countWords(start, end, wordIterator)); + start = end; + end = sentenceIterator.next(); + assertEquals(splitByWhiteSpaceSentenceSizes[1], SentenceBoundaryChunker.countWords(start, end, wordIterator)); + start = end; + end = sentenceIterator.next(); + assertEquals(splitByWhiteSpaceSentenceSizes[2], SentenceBoundaryChunker.countWords(start, end, wordIterator)); + start = end; + end = sentenceIterator.next(); + assertEquals(splitByWhiteSpaceSentenceSizes[3], SentenceBoundaryChunker.countWords(start, end, wordIterator)); + + assertEquals(BreakIterator.DONE, sentenceIterator.next()); + } + + public void testCountWords_short() { + // Test word count matches the whitespace separated word count. + var text = "This is a short sentence. 
Followed by another."; + + var sentenceIterator = BreakIterator.getSentenceInstance(Locale.ROOT); + sentenceIterator.setText(text); + + var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + wordIterator.setText(text); + + int start = 0; + int end = sentenceIterator.next(); + assertEquals(5, SentenceBoundaryChunker.countWords(0, end, wordIterator)); + start = end; + end = sentenceIterator.next(); + assertEquals(3, SentenceBoundaryChunker.countWords(start, end, wordIterator)); + assertEquals(BreakIterator.DONE, sentenceIterator.next()); + } + + public void testCountWords_WithSymbols() { + { + var text = "foo != bar"; + var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + wordIterator.setText(text); + // "foo", "bar" - "!=" is not counted + assertEquals(2, SentenceBoundaryChunker.countWords(0, text.length(), wordIterator)); + } + { + var text = "foo & bar"; + var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + wordIterator.setText(text); + // "foo", "bar" - the & is not counted + assertEquals(2, SentenceBoundaryChunker.countWords(0, text.length(), wordIterator)); + } + { + var text = "m&s"; + var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + wordIterator.setText(text); + // "m", "s" - the & is not counted + assertEquals(2, SentenceBoundaryChunker.countWords(0, text.length(), wordIterator)); + } + } + + private int[] sentenceSizes(String text) { + var sentences = text.split("\\.\\s+"); + var lengths = new int[sentences.length]; + for (int i = 0; i < sentences.length; i++) { + // strip out the '=' signs as they are not counted as words by ICU + sentences[i] = sentences[i].replace("=", ""); + // split by hyphen or whitespace to match the way + // the ICU break iterator counts words + lengths[i] = sentences[i].split("[ \\-]+").length; + } + return lengths; + } + + private int expectedNumberOfChunks(int[] sentenceLengths, int maxWordsPerChunk) { + int numChunks = 1; + int runningWordCount = 0; + for (int i = 0; i < sentenceLengths.length; i++) { + if (runningWordCount + sentenceLengths[i] > maxWordsPerChunk) { + numChunks++; + runningWordCount = sentenceLengths[i]; + } else { + runningWordCount += sentenceLengths[i]; + } + } + return numChunks; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java similarity index 87% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunkerTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java index 14cb63673e174..864d01507ca35 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/WordBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java @@ -5,11 +5,14 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.common; +package org.elasticsearch.xpack.inference.chunking; + +import com.ibm.icu.text.BreakIterator; import org.elasticsearch.test.ESTestCase; import java.util.List; +import java.util.Locale; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; @@ -17,15 +20,17 @@ public class WordBoundaryChunkerTests extends ESTestCase { - private final String TEST_TEXT = "Word segmentation is the problem of dividing a string of written language into its component words.\n" - + "In English and many other languages using some form of the Latin alphabet, the space is a good approximation of a word divider " - + "(word delimiter), although this concept has limits because of the variability with which languages emically regard collocations " - + "and compounds. Many English compound nouns are variably written (for example, ice box = ice-box = icebox; pig sty = pig-sty = " - + "pigsty) with a corresponding variation in whether speakers think of them as noun phrases or single nouns; there are trends in " - + "how norms are set, such as that open compounds often tend eventually to solidify by widespread convention, but variation remains" - + " systemic. In contrast, German compound nouns show less orthographic variation, with solidification being a stronger norm."; - - private final String[] MULTI_LINGUAL = new String[] { + @SuppressWarnings("checkstyle:linelength") + public static final String TEST_TEXT = + "Word segmentation is the problem of dividing a string of written language into its component words.\n" + + "In English and many other languages using some form of the Latin alphabet, the space is a good approximation of a word divider " + + "(word delimiter), although this concept has limits because of the variability with which languages emically regard collocations " + + "and compounds. Many English compound nouns are variably written (for example, ice box = ice-box = icebox; pig sty = pig-sty = " + + "pigsty) with a corresponding variation in whether speakers think of them as noun phrases or single nouns; there are trends in " + + "how norms are set, such as that open compounds often tend eventually to solidify by widespread convention, but variation remains" + + " systemic. In contrast, German compound nouns show less orthographic variation, with solidification being a stronger norm."; + + public static final String[] MULTI_LINGUAL = new String[] { "Građevne strukture Mesa Verde dokaz su akumuliranog znanja i vještina koje su se stoljećima prenosile generacijama civilizacije" + " Anasazi. Vrhunce svojih dosega ostvarili su u 12. i 13. stoljeću, kada su sagrađene danas najpoznatije građevine na " + "liticama. Zidali su obrađenim pješčenjakom, tvrđim kamenom oblikovanim do veličine štruce kruha. 
Kao žbuku između ciglā " @@ -48,6 +53,17 @@ public class WordBoundaryChunkerTests extends ESTestCase { + " خليفہ المومنين يا خليفہ المسلمين يا صحابی يا رضي الله عنه چئي۔ (ب) آنحضور ﷺ جي گھروارين کان علاوه ڪنھن کي ام المومنين " + "چئي۔ (ج) آنحضور ﷺ جي خاندان جي اھل بيت کان علاوہڍه ڪنھن کي اھل بيت چئي۔ (د) پنھنجي عبادت گاھ کي مسجد چئي۔" }; + public static int NUM_WORDS_IN_TEST_TEXT; + static { + var wordIterator = BreakIterator.getWordInstance(Locale.ROOT); + wordIterator.setText(TEST_TEXT); + int wordCount = 0; + while (wordIterator.next() != BreakIterator.DONE) { + wordCount++; + } + NUM_WORDS_IN_TEST_TEXT = wordCount; + } + public void testSingleSplit() { var chunker = new WordBoundaryChunker(); var chunks = chunker.chunk(TEST_TEXT, 10_000, 0); From b666c029e2045edab7e79e8db66cab673594f067 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 3 Jul 2024 01:10:31 +1000 Subject: [PATCH 115/216] Mute org.elasticsearch.search.aggregations.bucket.terms.RareTermsIT testSingleValuedString #110388 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 78f01713f7351..f91aa7c7173ec 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -109,6 +109,9 @@ tests: - class: org.elasticsearch.search.vectors.ExactKnnQueryBuilderTests method: testToQuery issue: https://github.com/elastic/elasticsearch/issues/110357 +- class: org.elasticsearch.search.aggregations.bucket.terms.RareTermsIT + method: testSingleValuedString + issue: https://github.com/elastic/elasticsearch/issues/110388 # Examples: # From 33d63fa9c2fc3afb9184f31e2681942cfd8dc7fd Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 2 Jul 2024 11:53:07 -0400 Subject: [PATCH 116/216] Tidy up some geoip code and fix a mistaken test (#110384) --- .../ingest/geoip/DatabaseNodeService.java | 21 +++++----- .../ingest/geoip/GeoIpDownloader.java | 22 +++++++---- .../geoip/GeoIpDownloaderTaskExecutor.java | 4 +- .../ingest/geoip/GeoIpTaskState.java | 18 +++++++++ .../ingest/geoip/GeoIpDownloaderTests.java | 39 +++++++++++-------- .../NodePersistentTasksExecutor.java | 1 - 6 files changed, 68 insertions(+), 37 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index 266d40f2f9d56..efae8fa0c50ca 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -64,7 +64,7 @@ import java.util.zip.GZIPInputStream; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.persistent.PersistentTasksCustomMetadata.getTaskWithId; +import static org.elasticsearch.ingest.geoip.GeoIpTaskState.getGeoIpTaskState; /** * A component that is responsible for making the databases maintained by {@link GeoIpDownloader} @@ -179,11 +179,10 @@ public Boolean isValid(String databaseFile) { ClusterState currentState = clusterService.state(); assert currentState != null; - PersistentTasksCustomMetadata.PersistentTask task = getTaskWithId(currentState, GeoIpDownloader.GEOIP_DOWNLOADER); - if (task == null || task.getState() == null) { + GeoIpTaskState state = getGeoIpTaskState(currentState); + if (state == null) { return true; } - GeoIpTaskState state = (GeoIpTaskState) task.getState(); GeoIpTaskState.Metadata metadata = 
state.getDatabases().get(databaseFile); // we never remove metadata from cluster state, if metadata is null we deal with built-in database, which is always valid if (metadata == null) { @@ -270,12 +269,11 @@ void checkDatabases(ClusterState state) { } } - PersistentTasksCustomMetadata.PersistentTask task = PersistentTasksCustomMetadata.getTaskWithId( - state, - GeoIpDownloader.GEOIP_DOWNLOADER - ); - // Empty state will purge stale entries in databases map. - GeoIpTaskState taskState = task == null || task.getState() == null ? GeoIpTaskState.EMPTY : (GeoIpTaskState) task.getState(); + GeoIpTaskState taskState = getGeoIpTaskState(state); + if (taskState == null) { + // Note: an empty state will purge stale entries in databases map + taskState = GeoIpTaskState.EMPTY; + } taskState.getDatabases().entrySet().stream().filter(e -> e.getValue().isValid(state.getMetadata().settings())).forEach(e -> { String name = e.getKey(); @@ -291,7 +289,7 @@ void checkDatabases(ClusterState state) { try { retrieveAndUpdateDatabase(name, metadata); } catch (Exception ex) { - logger.error(() -> "attempt to download database [" + name + "] failed", ex); + logger.error(() -> "failed to retrieve database [" + name + "]", ex); } }); @@ -511,4 +509,5 @@ public Set getFilesInTemp() { public CacheStats getCacheStats() { return cache.getCacheStats(); } + } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java index 713e5111853a7..895c9315d2325 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java @@ -111,16 +111,25 @@ public class GeoIpDownloader extends AllocatedPersistentTask { Supplier atLeastOneGeoipProcessorSupplier ) { super(id, type, action, description, parentTask, headers); - this.httpClient = httpClient; this.client = client; + this.httpClient = httpClient; this.clusterService = clusterService; this.threadPool = threadPool; - endpoint = ENDPOINT_SETTING.get(settings); + this.endpoint = ENDPOINT_SETTING.get(settings); this.pollIntervalSupplier = pollIntervalSupplier; this.eagerDownloadSupplier = eagerDownloadSupplier; this.atLeastOneGeoipProcessorSupplier = atLeastOneGeoipProcessorSupplier; } + void setState(GeoIpTaskState state) { + // this is for injecting the state in GeoIpDownloaderTaskExecutor#nodeOperation just after the task instance has been created + // by the PersistentTasksNodeService -- since the GeoIpDownloader is newly created, the state will be null, and the passed-in + // state cannot be null + assert this.state == null; + assert state != null; + this.state = state; + } + // visible for testing void updateDatabases() throws IOException { var clusterState = clusterService.state(); @@ -171,7 +180,7 @@ void processDatabase(Map databaseInfo) { logger.debug("downloading geoip database [{}]", name); String url = databaseInfo.get("url").toString(); if (url.startsWith("http") == false) { - // relative url, add it after last slash (i.e resolve sibling) or at the end if there's no slash after http[s]:// + // relative url, add it after last slash (i.e. resolve sibling) or at the end if there's no slash after http[s]:// int lastSlash = endpoint.substring(8).lastIndexOf('/'); url = (lastSlash != -1 ? 
endpoint.substring(0, lastSlash + 8) : endpoint) + "/" + url; } @@ -264,14 +273,13 @@ static byte[] getChunk(InputStream is) throws IOException { return buf; } - void setState(GeoIpTaskState state) { - this.state = state; - } - /** * Downloads the geoip databases now, and schedules them to be downloaded again after pollInterval. */ void runDownloader() { + // by the time we reach here, the state will never be null + assert state != null; + if (isCancelled() || isCompleted()) { return; } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 0a423cb375e88..09ac488f96e2d 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -105,7 +105,7 @@ public final class GeoIpDownloaderTaskExecutor extends PersistentTasksExecutor task = getTaskWithId(state, GeoIpDownloader.GEOIP_DOWNLOADER); + return (task == null) ? null : (GeoIpTaskState) task.getState(); + } + } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java index d84e1aac303d9..9cc5405c1b617 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.ingest.geoip.stats.GeoIpDownloaderStats; import org.elasticsearch.node.Node; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -276,9 +277,8 @@ public void testProcessDatabaseNew() throws IOException { () -> true ) { @Override - void updateTaskState() { - assertEquals(0, state.get("test").firstChunk()); - assertEquals(10, state.get("test").lastChunk()); + protected void updateTimestamp(String name, GeoIpTaskState.Metadata metadata) { + fail(); } @Override @@ -289,19 +289,22 @@ int indexChunks(String name, InputStream is, int chunk, String expectedMd5, long } @Override - protected void updateTimestamp(String name, GeoIpTaskState.Metadata metadata) { - fail(); + void updateTaskState() { + assertEquals(0, state.get("test.mmdb").firstChunk()); + assertEquals(10, state.get("test.mmdb").lastChunk()); } @Override void deleteOldChunks(String name, int firstChunk) { - assertEquals("test", name); + assertEquals("test.mmdb", name); assertEquals(0, firstChunk); } }; geoIpDownloader.setState(GeoIpTaskState.EMPTY); geoIpDownloader.processDatabase(Map.of("name", "test.tgz", "url", "http://a.b/t1", "md5_hash", "1")); + GeoIpDownloaderStats stats = geoIpDownloader.getStatus(); + assertEquals(0, stats.getFailedDownloads()); } public void testProcessDatabaseUpdate() throws IOException { @@ -325,9 +328,8 @@ public void testProcessDatabaseUpdate() throws IOException { () -> true ) { @Override - void updateTaskState() { - assertEquals(9, state.get("test.mmdb").firstChunk()); - assertEquals(10, state.get("test.mmdb").lastChunk()); + protected void updateTimestamp(String name, GeoIpTaskState.Metadata metadata) { + fail(); } 
@Override @@ -338,8 +340,9 @@ int indexChunks(String name, InputStream is, int chunk, String expectedMd5, long } @Override - protected void updateTimestamp(String name, GeoIpTaskState.Metadata metadata) { - fail(); + void updateTaskState() { + assertEquals(9, state.get("test.mmdb").firstChunk()); + assertEquals(10, state.get("test.mmdb").lastChunk()); } @Override @@ -351,6 +354,8 @@ void deleteOldChunks(String name, int firstChunk) { geoIpDownloader.setState(GeoIpTaskState.EMPTY.put("test.mmdb", new GeoIpTaskState.Metadata(0, 5, 8, "0", 0))); geoIpDownloader.processDatabase(Map.of("name", "test.tgz", "url", "http://a.b/t1", "md5_hash", "1")); + GeoIpDownloaderStats stats = geoIpDownloader.getStatus(); + assertEquals(0, stats.getFailedDownloads()); } public void testProcessDatabaseSame() throws IOException { @@ -376,8 +381,9 @@ public void testProcessDatabaseSame() throws IOException { () -> true ) { @Override - void updateTaskState() { - fail(); + protected void updateTimestamp(String name, GeoIpTaskState.Metadata newMetadata) { + assertEquals(metadata, newMetadata); + assertEquals("test.mmdb", name); } @Override @@ -387,9 +393,8 @@ int indexChunks(String name, InputStream is, int chunk, String expectedMd5, long } @Override - protected void updateTimestamp(String name, GeoIpTaskState.Metadata newMetadata) { - assertEquals(metadata, newMetadata); - assertEquals("test.mmdb", name); + void updateTaskState() { + fail(); } @Override @@ -399,6 +404,8 @@ void deleteOldChunks(String name, int firstChunk) { }; geoIpDownloader.setState(taskState); geoIpDownloader.processDatabase(Map.of("name", "test.tgz", "url", "http://a.b/t1", "md5_hash", "1")); + GeoIpDownloaderStats stats = geoIpDownloader.getStatus(); + assertEquals(0, stats.getFailedDownloads()); } @SuppressWarnings("unchecked") diff --git a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java index a75c4b3352475..396b5bd5c59a2 100644 --- a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java +++ b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java @@ -35,7 +35,6 @@ protected void doRun() throws Exception { } catch (Exception ex) { task.markAsFailed(ex); } - } }); } From 566f5f831afc9189733d09c0013ed70c8740343d Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 2 Jul 2024 18:59:11 +0300 Subject: [PATCH 117/216] Query Roles API (#108733) This adds the Query Roles API: ``` POST /_security/_query/role GET /_security/_query/role ``` This is similar to the currently existing: * [Query API key API](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-api-key.html) * [Query User API](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-user.html) Sample request: ``` POST /_security/_query/role { "query": { "bool": { "filter": [ { "terms": { "applications.application": ["app-1", "app-2" ] } } ], "must_not": [ { "match": { "description": { "query": "test match on role description (which is mapped as a text field)" } } } ] } }, "sort": [ "name" ], "search_after": [ "role-name-1" ] } ``` The query supports a subset of query types, including match_all, bool, term, terms, match, ids, prefix, wildcard, exists, range, and simple query string. 
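For example (an illustrative request, not taken from this change; the role and application names are made up), several of the supported query types can be combined under a single `bool` query, together with `from` and `size` for pagination:
```
POST /_security/_query/role
{
  "query": {
    "bool": {
      "must": [
        { "prefix": { "name": "app-role-" } }
      ],
      "filter": [
        { "exists": { "field": "description" } },
        { "terms": { "applications.application": [ "app-1" ] } }
      ]
    }
  },
  "from": 0,
  "size": 10
}
```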
Currently, the supported fields are:

* name
* description
* metadata
* applications.application
* applications.resources
* applications.privileges

The query also supports pagination-related fields (`from`, `size`, `search_after`), analogous to the generic [Search API](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html).

The response format is similar to that of the [Query API key](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-api-key.html) and [Query User](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-user.html) APIs. It contains a **list** of roles, in sorted order (if a sort was specified). Unlike the [Get Roles API](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html), the role **name** is an attribute of each element in the list of roles (in the get-roles API case, the role name was the key in the response map, and the value was the rest of the role descriptor). In addition, each element in the list of roles also contains the optional `_sort` field, e.g. (sample response):
```
{
  "total": 3,
  "count": 3,
  "roles": [
    {
      "name": "LYdz2",
      "cluster": [],
      "indices": [],
      "applications": [
        {
          "application": "ejYWvGQTF",
          "privileges": [ "pRCfBMgOy", "zDhFtMQfc", "roudxado" ],
          "resources": [ "nWHEpmgxy", "SOML/hMYrqx", "YIqP/*", "ueEomwsA" ]
        },
        {
          "application": "ampUW9",
          "privileges": [ "jDvRtp" ],
          "resources": [ "99" ]
        }
      ],
      "run_as": [],
      "metadata": {
        "nFKc": [ 1, 0 ],
        "PExF": [],
        "qlqY": -433239865,
        "IQXm": []
      },
      "transient_metadata": { "enabled": true },
      "description": "KoLlsEbq",
      "_sort": [ "LYdz2" ]
    },
    {
      "name": "oaxW0",
      "cluster": [],
      "indices": [],
      "applications": [
        {
          "application": "*",
          "privileges": [ "qZYb" ],
          "resources": [ "tFrSULaKb" ]
        },
        {
          "application": "aLaEN9",
          "privileges": [ "fCOc" ],
          "resources": [ "gozqXtSgE", "UX/JgydeIM", "sjUp", "Ivdz/UAmuNrQAG" ]
        },
        {
          "application": "rbxyuKIMPAp",
          "privileges": [ "lluqieFRu", "xKU", "gHlb" ],
          "resources": [ "99" ]
        }
      ],
      "run_as": [],
      "metadata": {},
      "transient_metadata": { "enabled": true },
      "_sort": [ "oaxW0" ]
    },
    {
      "name": "vWAV1",
      "cluster": [],
      "indices": [],
      "applications": [
        {
          "application": "*",
          "privileges": [ "kWBWjCAc" ],
          "resources": [ "hvEtV", "gZJ" ]
        },
        {
          "application": "avVUV9",
          "privileges": [ "newZTa", "gQpxNm" ],
          "resources": [ "99" ]
        }
      ],
      "run_as": [],
      "metadata": {},
      "transient_metadata": { "enabled": true },
      "_sort": [ "vWAV1" ]
    }
  ]
}
```
---
 docs/changelog/108733.yaml | 5 +
 .../api/security.query_role.json | 33 +
 .../core/security/action/ActionTypes.java | 3 +-
 .../action/role/QueryRoleRequest.java | 87 +++
 .../action/role/QueryRoleResponse.java | 106 +++
 .../core/security/authz/RoleDescriptor.java | 12 +-
 .../privilege/ClusterPrivilegeResolver.java | 1 +
 .../action/role/QueryRoleRequestTests.java | 44 ++
 .../authz/privilege/PrivilegeTests.java | 2 +
 .../xpack/security/operator/Constants.java | 1 +
 .../xpack/security/LicenseDLSFLSRoleIT.java | 196 ++++++
 .../xpack/security/QueryRoleIT.java | 653 ++++++++++++++++++
 .../xpack/security/QueryUserIT.java | 10 +-
 .../security/SecurityInBasicRestTestCase.java | 7 +-
 .../src/javaRestTest/resources/roles.yml | 4 +-
 .../xpack/security/Security.java | 4 +
 .../action/role/TransportQueryRoleAction.java | 98 +++
 .../authz/store/NativeRolesStore.java | 57 +-
 .../rest/action/role/RestQueryRoleAction.java | 106 +++
 .../support/FieldNameTranslators.java | 18 +-
 .../support/RoleBoolQueryBuilder.java | 75 ++
 .../support/SecurityIndexManager.java | 8 +
.../security/support/SecurityMigrations.java | 99 +-- .../rest-api-spec/test/roles/10_basic.yml | 27 + .../test/roles/50_remote_only.yml | 16 + .../upgrades/AbstractUpgradeTestCase.java | 1 + ...SecurityIndexRolesMetadataMigrationIT.java | 158 +++-- 27 files changed, 1725 insertions(+), 106 deletions(-) create mode 100644 docs/changelog/108733.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequest.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequestTests.java create mode 100644 x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java create mode 100644 x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportQueryRoleAction.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestQueryRoleAction.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RoleBoolQueryBuilder.java diff --git a/docs/changelog/108733.yaml b/docs/changelog/108733.yaml new file mode 100644 index 0000000000000..76a969219ea4c --- /dev/null +++ b/docs/changelog/108733.yaml @@ -0,0 +1,5 @@ +pr: 108733 +summary: Query Roles API +area: Security +type: feature +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json new file mode 100644 index 0000000000000..d9f9d9f45ff69 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_role.json @@ -0,0 +1,33 @@ +{ + "security.query_role": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-role.html", + "description": "Retrieves information for Roles using a subset of query DSL" + }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_security/_query/role", + "methods": [ + "GET", + "POST" + ] + } + ] + }, + "body": { + "description": "From, size, query, sort and search_after", + "required": false + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java index 43e914f873a83..5406ecb105d0e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse; +import org.elasticsearch.xpack.core.security.action.role.QueryRoleResponse; import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; /** @@ -24,6 +25,6 @@ public final class ActionTypes { ); 
     public static final ActionType<QueryUserResponse> QUERY_USER_ACTION = new ActionType<>("cluster:admin/xpack/security/user/query");
-
+    public static final ActionType<QueryRoleResponse> QUERY_ROLE_ACTION = new ActionType<>("cluster:admin/xpack/security/role/query");
     public static final ActionType<BulkPutRolesResponse> BULK_PUT_ROLES = new ActionType<>("cluster:admin/xpack/security/role/bulk_put");
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequest.java
new file mode 100644
index 0000000000000..c61f9b7156dda
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequest.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.security.action.role;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.TransportAction;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.search.searchafter.SearchAfterBuilder;
+import org.elasticsearch.search.sort.FieldSortBuilder;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
+public final class QueryRoleRequest extends ActionRequest {
+
+    @Nullable
+    private final QueryBuilder queryBuilder;
+    @Nullable
+    private final Integer from;
+    @Nullable
+    private final Integer size;
+    @Nullable
+    private final List<FieldSortBuilder> fieldSortBuilders;
+    @Nullable
+    private final SearchAfterBuilder searchAfterBuilder;
+
+    public QueryRoleRequest(
+        @Nullable QueryBuilder queryBuilder,
+        @Nullable Integer from,
+        @Nullable Integer size,
+        @Nullable List<FieldSortBuilder> fieldSortBuilders,
+        @Nullable SearchAfterBuilder searchAfterBuilder
+    ) {
+        this.queryBuilder = queryBuilder;
+        this.from = from;
+        this.size = size;
+        this.fieldSortBuilders = fieldSortBuilders;
+        this.searchAfterBuilder = searchAfterBuilder;
+    }
+
+    public QueryBuilder getQueryBuilder() {
+        return queryBuilder;
+    }
+
+    public Integer getFrom() {
+        return from;
+    }
+
+    public Integer getSize() {
+        return size;
+    }
+
+    public List<FieldSortBuilder> getFieldSortBuilders() {
+        return fieldSortBuilders;
+    }
+
+    public SearchAfterBuilder getSearchAfterBuilder() {
+        return searchAfterBuilder;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        ActionRequestValidationException validationException = null;
+        if (from != null && from < 0) {
+            validationException = addValidationError("[from] parameter cannot be negative but was [" + from + "]", validationException);
+        }
+        if (size != null && size < 0) {
+            validationException = addValidationError("[size] parameter cannot be negative but was [" + size + "]", validationException);
+        }
+        return validationException;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        TransportAction.localOnly();
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java
new file mode 100644
index 0000000000000..6bdc6c66c1835
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.security.action.role;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.support.TransportAction;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+public final class QueryRoleResponse extends ActionResponse implements ToXContentObject {
+
+    public static final QueryRoleResponse EMPTY = new QueryRoleResponse(0, List.of());
+
+    private final long total;
+    private final List<Item> foundRoleDescriptors;
+
+    public QueryRoleResponse(long total, List<Item> foundRoleDescriptors) {
+        this.total = total;
+        Objects.requireNonNull(foundRoleDescriptors, "found_role_descriptor must be provided");
+        this.foundRoleDescriptors = foundRoleDescriptors;
+    }
+
+    public long getTotal() {
+        return total;
+    }
+
+    public List<Item> getRoleDescriptors() {
+        return foundRoleDescriptors;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("total", total).field("count", foundRoleDescriptors.size()).field("roles", foundRoleDescriptors);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        TransportAction.localOnly();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        QueryRoleResponse that = (QueryRoleResponse) o;
+        return total == that.total && Objects.equals(foundRoleDescriptors, that.foundRoleDescriptors);
+    }
+
+    @Override
+    public int hashCode() {
+        int result = Objects.hash(total);
+        result = 31 * result + Objects.hash(foundRoleDescriptors);
+        return result;
+    }
+
+    @Override
+    public String toString() {
+        return "QueryRoleResponse{total=" + total + ", items=" + foundRoleDescriptors + "}";
+    }
+
+    public record Item(RoleDescriptor roleDescriptor, @Nullable Object[] sortValues) implements ToXContentObject {
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            // The role name is not normally stored in the role document (it is part of the doc id),
+            // so the "toXContent" method doesn't include it.
+            // But, for the query role API, we'd like to return the role name together with the
+            // other details of the role descriptor (in the same object).
+ assert Strings.isNullOrEmpty(roleDescriptor.getName()) == false; + builder.field("name", roleDescriptor.getName()); + roleDescriptor.innerToXContent(builder, params, false, false); + if (sortValues != null && sortValues.length > 0) { + builder.array("_sort", sortValues); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return "Item{roleDescriptor=" + roleDescriptor + ", sortValues=" + Arrays.toString(sortValues) + "}"; + } + } + + public record QueryRoleResult(long total, List items) { + public static final QueryRoleResult EMPTY = new QueryRoleResult(0, List.of()); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index 08e774006ad32..7bedab61bd43d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -420,6 +420,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea return toXContent(builder, params, docCreation, false); } + public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation, boolean includeMetadataFlattened) + throws IOException { + builder.startObject(); + innerToXContent(builder, params, docCreation, includeMetadataFlattened); + return builder.endObject(); + } + /** * Generates x-content for this {@link RoleDescriptor} instance. * @@ -432,9 +439,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea * @return x-content builder * @throws IOException if there was an error writing the x-content to the builder */ - public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation, boolean includeMetadataFlattened) + public XContentBuilder innerToXContent(XContentBuilder builder, Params params, boolean docCreation, boolean includeMetadataFlattened) throws IOException { - builder.startObject(); builder.array(Fields.CLUSTER.getPreferredName(), clusterPrivileges); if (configurableClusterPrivileges.length != 0) { builder.field(Fields.GLOBAL.getPreferredName()); @@ -466,7 +472,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea if (hasDescription()) { builder.field(Fields.DESCRIPTION.getPreferredName(), description); } - return builder.endObject(); + return builder; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 1cbe6c739a75f..a435f7e877250 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -280,6 +280,7 @@ public class ClusterPrivilegeResolver { ProfileHasPrivilegesAction.NAME, SuggestProfilesAction.NAME, GetRolesAction.NAME, + ActionTypes.QUERY_ROLE_ACTION.name(), GetRoleMappingsAction.NAME, GetServiceAccountAction.NAME, GetServiceAccountCredentialsAction.NAME + "*", diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequestTests.java new file mode 100644 index 0000000000000..832e35a8dce32 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleRequestTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.role; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.nullValue; + +public class QueryRoleRequestTests extends ESTestCase { + public void testValidate() { + final QueryRoleRequest request1 = new QueryRoleRequest( + null, + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + null, + null + ); + assertThat(request1.validate(), nullValue()); + + final QueryRoleRequest request2 = new QueryRoleRequest( + null, + randomIntBetween(Integer.MIN_VALUE, -1), + randomIntBetween(0, Integer.MAX_VALUE), + null, + null + ); + assertThat(request2.validate().getMessage(), containsString("[from] parameter cannot be negative")); + + final QueryRoleRequest request3 = new QueryRoleRequest( + null, + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(Integer.MIN_VALUE, -1), + null, + null + ); + assertThat(request3.validate().getMessage(), containsString("[size] parameter cannot be negative")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 54af9d947a9e8..6f3c435eb12f6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -278,6 +278,7 @@ public void testReadSecurityPrivilege() { ProfileHasPrivilegesAction.NAME, SuggestProfilesAction.NAME, GetRolesAction.NAME, + ActionTypes.QUERY_ROLE_ACTION.name(), GetRoleMappingsAction.NAME, GetServiceAccountAction.NAME, GetServiceAccountCredentialsAction.NAME, @@ -340,6 +341,7 @@ public void testManageUserProfilePrivilege() { ClusterPrivilegeResolver.MANAGE_USER_PROFILE, "cluster:admin/xpack/security/role/put", "cluster:admin/xpack/security/role/get", + "cluster:admin/xpack/security/role/query", "cluster:admin/xpack/security/role/delete" ); verifyClusterActionDenied( diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 092c8e6ccf391..a85be132ebca8 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -278,6 +278,7 @@ public class Constants { "cluster:admin/xpack/security/remote_cluster_credentials/reload", "cluster:admin/xpack/security/role/delete", "cluster:admin/xpack/security/role/get", + 
"cluster:admin/xpack/security/role/query", "cluster:admin/xpack/security/role/put", "cluster:admin/xpack/security/role/bulk_put", "cluster:admin/xpack/security/role_mapping/delete", diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java new file mode 100644 index 0000000000000..f81bab4866bdf --- /dev/null +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/LicenseDLSFLSRoleIT.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.local.model.User; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.security.QueryRoleIT.assertQuery; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.iterableWithSize; + +/** + * This class tests that roles with DLS and FLS are disabled when queried when the license doesn't allow such features. 
+ */ +public final class LicenseDLSFLSRoleIT extends ESRestTestCase { + + protected static final String REST_USER = "security_test_user"; + private static final SecureString REST_PASSWORD = new SecureString("security-test-password".toCharArray()); + private static final String ADMIN_USER = "admin_user"; + private static final SecureString ADMIN_PASSWORD = new SecureString("admin-password".toCharArray()); + protected static final String READ_SECURITY_USER = "read_security_user"; + private static final SecureString READ_SECURITY_PASSWORD = new SecureString("read-security-password".toCharArray()); + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(1) + .distribution(DistributionType.DEFAULT) + // start as "trial" + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .setting("xpack.security.http.ssl.enabled", "false") + .setting("xpack.security.transport.ssl.enabled", "false") + .rolesFile(Resource.fromClasspath("roles.yml")) + .user(ADMIN_USER, ADMIN_PASSWORD.toString(), User.ROOT_USER_ROLE, true) + .user(REST_USER, REST_PASSWORD.toString(), "security_test_role", false) + .user(READ_SECURITY_USER, READ_SECURITY_PASSWORD.toString(), "read_security_user_role", false) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restAdminSettings() { + String token = basicAuthHeaderValue(ADMIN_USER, ADMIN_PASSWORD); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue(REST_USER, REST_PASSWORD); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @SuppressWarnings("unchecked") + public void testQueryDLSFLSRolesShowAsDisabled() throws Exception { + // auto-generated "trial" + waitForLicense(adminClient(), "trial"); + // neither DLS nor FLS role + { + RoleDescriptor.IndicesPrivileges[] indicesPrivileges = new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("no-dls-nor-fls*").privileges("read").build() }; + createRoleWithIndicesPrivileges(adminClient(), "role_with_neither", indicesPrivileges); + } + // role with DLS + { + RoleDescriptor.IndicesPrivileges[] indicesPrivileges = new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("read").query("{\"match_all\":{}}").build() }; + createRoleWithIndicesPrivileges(adminClient(), "role_with_DLS", indicesPrivileges); + } + // role with FLS + { + RoleDescriptor.IndicesPrivileges[] indicesPrivileges = new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .grantedFields("granted_field1", "granted*") + .build() }; + createRoleWithIndicesPrivileges(adminClient(), "role_with_FLS", indicesPrivileges); + } + // role with DLS and FLS + { + RoleDescriptor.IndicesPrivileges[] indicesPrivileges = new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .grantedFields("granted_field1", "granted*") + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("*") + .privileges("read") + .query("{\"match\": {\"category\": \"click\"}}") + .build() }; + createRoleWithIndicesPrivileges(adminClient(), "role_with_FLS_and_DLS", indicesPrivileges); + } + assertQuery(client(), "", 
4, roles -> { + roles.sort(Comparator.comparing(o -> ((String) o.get("name")))); + assertThat(roles, iterableWithSize(4)); + assertThat(roles.get(0).get("name"), equalTo("role_with_DLS")); + assertRoleEnabled(roles.get(0), true); + assertThat(roles.get(1).get("name"), equalTo("role_with_FLS")); + assertRoleEnabled(roles.get(1), true); + assertThat(roles.get(2).get("name"), equalTo("role_with_FLS_and_DLS")); + assertRoleEnabled(roles.get(2), true); + assertThat(roles.get(3).get("name"), equalTo("role_with_neither")); + assertRoleEnabled(roles.get(3), true); + }); + // start "basic" license + Request request = new Request("POST", "/_license/start_basic?acknowledge=true"); + Response response = adminClient().performRequest(request); + assertOK(response); + Map responseMap = responseAsMap(response); + assertTrue(((Boolean) responseMap.get("basic_was_started"))); + assertTrue(((Boolean) responseMap.get("acknowledged"))); + waitForLicense(adminClient(), "basic"); + // now the same roles show up as disabled ("enabled" is "false") + assertQuery(client(), "", 4, roles -> { + roles.sort(Comparator.comparing(o -> ((String) o.get("name")))); + assertThat(roles, iterableWithSize(4)); + assertThat(roles.get(0).get("name"), equalTo("role_with_DLS")); + assertRoleEnabled(roles.get(0), false); + assertThat(roles.get(1).get("name"), equalTo("role_with_FLS")); + assertRoleEnabled(roles.get(1), false); + assertThat(roles.get(2).get("name"), equalTo("role_with_FLS_and_DLS")); + assertRoleEnabled(roles.get(2), false); + // role with neither DLS nor FLS is still enabled + assertThat(roles.get(3).get("name"), equalTo("role_with_neither")); + assertRoleEnabled(roles.get(3), true); + }); + } + + @SuppressWarnings("unchecked") + private void createRoleWithIndicesPrivileges(RestClient adminClient, String name, RoleDescriptor.IndicesPrivileges[] indicesPrivileges) + throws IOException { + Request request = new Request("POST", "/_security/role/" + name); + Map requestMap = new HashMap<>(); + requestMap.put(RoleDescriptor.Fields.INDICES.getPreferredName(), indicesPrivileges); + BytesReference source = BytesReference.bytes(jsonBuilder().map(requestMap)); + request.setJsonEntity(source.utf8ToString()); + Response response = adminClient.performRequest(request); + assertOK(response); + Map responseMap = responseAsMap(response); + assertTrue((Boolean) ((Map) responseMap.get("role")).get("created")); + } + + @SuppressWarnings("unchecked") + private static void assertRoleEnabled(Map roleMap, boolean enabled) { + assertTrue(roleMap.containsKey("transient_metadata")); + assertThat(roleMap.get("transient_metadata"), instanceOf(Map.class)); + assertThat(((Map) roleMap.get("transient_metadata")).get("enabled"), equalTo(enabled)); + } + + @SuppressWarnings("unchecked") + private static void waitForLicense(RestClient adminClient, String type) throws Exception { + final Request request = new Request("GET", "_license"); + assertBusy(() -> { + Response response; + try { + response = adminClient.performRequest(request); + } catch (ResponseException e) { + throw new AssertionError("license not yet installed", e); + } + assertOK(response); + Map responseMap = responseAsMap(response); + assertTrue(responseMap.containsKey("license")); + assertThat(((Map) responseMap.get("license")).get("status"), equalTo("active")); + assertThat(((Map) responseMap.get("license")).get("type"), equalTo(type)); + }); + } +} diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java 
b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java new file mode 100644 index 0000000000000..1588749b9a331 --- /dev/null +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryRoleIT.java @@ -0,0 +1,653 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; +import org.elasticsearch.xpack.security.support.SecurityMigrations; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_DATA_KEY; +import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_KEY; +import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.iterableWithSize; + +public final class QueryRoleIT extends SecurityInBasicRestTestCase { + + private static final String READ_SECURITY_USER_AUTH_HEADER = "Basic cmVhZF9zZWN1cml0eV91c2VyOnJlYWQtc2VjdXJpdHktcGFzc3dvcmQ="; + + public void testSimpleQueryAllRoles() throws IOException { + assertQuery("", 0, roles -> assertThat(roles, emptyIterable())); + RoleDescriptor createdRole = createRandomRole(); + assertQuery("", 1, roles -> { + assertThat(roles, iterableWithSize(1)); + assertRoleMap(roles.get(0), createdRole); + }); + assertQuery(""" + {"query":{"match_all":{}},"from":1}""", 1, roles -> assertThat(roles, emptyIterable())); + } + + public void testDisallowedFields() throws Exception { + if (randomBoolean()) { + createRandomRole(); + } + // query on some disallowed field + { + Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role"); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER)); + request.setJsonEntity(""" + {"query":{"prefix":{"password":"whatever"}}}"""); + ResponseException e = 
expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(e.getMessage(), containsString("Field [password] is not allowed for querying or aggregation")); + } + // query on the _id field + { + Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role"); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER)); + request.setJsonEntity(""" + {"query":{"term":{"_id":"role-test"}}}"""); + ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(e.getMessage(), containsString("Field [_id] is not allowed for querying or aggregation")); + } + // sort on disallowed field + { + Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role"); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER)); + request.setJsonEntity(""" + {"query":{"bool":{"must_not":[{"wildcard":{"applications.application":"a*9"}}]}},"sort":["api_key_hash"]}"""); + ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(e.getMessage(), containsString("Field [api_key_hash] is not allowed for querying or aggregation")); + } + } + + public void testDisallowedQueryType() throws Exception { + if (randomBoolean()) { + createRandomRole(); + } + // query using some disallowed query type + { + Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role"); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER)); + request.setJsonEntity(""" + {"query":{"match_phrase":{"description":{"query":"whatever"}}}}"""); + ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(e.getMessage(), containsString("Query type [match_phrase] is not currently supported in this context")); + } + // query using some disallowed query type inside the (allowed) boolean query type + { + Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role"); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER)); + request.setJsonEntity(""" + {"query":{"bool":{"must_not":[{"more_like_this":{"fields":["description"],"like":"hollywood"}}]}}}"""); + ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(e.getMessage(), containsString("Query type [more_like_this] is not currently supported in this context")); + } + } + + public void testSimpleMetadataSearch() throws Exception { + int nroles = randomIntBetween(1, 3); + for (int i = 0; i < nroles; i++) { + createRandomRole(); + } + RoleDescriptor matchesOnMetadataValue = createRole( + "matchesOnMetadataValue", + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("matchSimpleKey", "matchSimpleValue"), + randomApplicationPrivileges() + ); + RoleDescriptor matchesOnMetadataKey = createRole( + "matchesOnMetadataKey", + randomBoolean() ? 
null : randomAlphaOfLength(8), + Map.of("matchSimpleKey", "other"), + randomApplicationPrivileges() + ); + createRole( + "other2", + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("other", "matchSimpleValue"), + randomApplicationPrivileges() + ); + waitForMigrationCompletion(adminClient(), SecurityMigrations.ROLE_METADATA_FLATTENED_MIGRATION_VERSION); + assertQuery(""" + {"query":{"term":{"metadata.matchSimpleKey":"matchSimpleValue"}}}""", 1, roles -> { + assertThat(roles, iterableWithSize(1)); + assertRoleMap(roles.get(0), matchesOnMetadataValue); + }); + assertQuery(""" + {"query":{"exists":{"field":"metadata.matchSimpleKey"}}}""", 2, roles -> { + assertThat(roles, iterableWithSize(2)); + roles.sort(Comparator.comparing(o -> ((String) o.get("name")))); + assertRoleMap(roles.get(0), matchesOnMetadataKey); + assertRoleMap(roles.get(1), matchesOnMetadataValue); + }); + } + + public void testSearchMultipleMetadataFields() throws Exception { + createRole( + "noMetadataRole", + randomBoolean() ? null : randomAlphaOfLength(8), + randomBoolean() ? null : Map.of(), + randomApplicationPrivileges() + ); + RoleDescriptor role1 = createRole( + "1" + randomAlphaOfLength(4), + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("simpleField1", "matchThis", "simpleField2", "butNotThis"), + randomApplicationPrivileges() + ); + createRole( + "2" + randomAlphaOfLength(4), + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("simpleField2", "butNotThis"), + randomApplicationPrivileges() + ); + RoleDescriptor role3 = createRole( + "3" + randomAlphaOfLength(4), + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("listField1", List.of("matchThis", "butNotThis"), "listField2", List.of("butNotThisToo")), + randomApplicationPrivileges() + ); + createRole( + "4" + randomAlphaOfLength(4), + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("listField2", List.of("butNotThisToo", "andAlsoNotThis")), + randomApplicationPrivileges() + ); + RoleDescriptor role5 = createRole( + "5" + randomAlphaOfLength(4), + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("listField1", List.of("maybeThis", List.of("matchThis")), "listField2", List.of("butNotThis")), + randomApplicationPrivileges() + ); + RoleDescriptor role6 = createRole( + "6" + randomAlphaOfLength(4), + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("mapField1", Map.of("innerField", "matchThis")), + randomApplicationPrivileges() + ); + createRole( + "7" + randomAlphaOfLength(4), + randomBoolean() ? null : randomAlphaOfLength(8), + Map.of("mapField1", Map.of("innerField", "butNotThis")), + randomApplicationPrivileges() + ); + RoleDescriptor role8 = createRole( + "8" + randomAlphaOfLength(4), + randomBoolean() ? 
null : randomAlphaOfLength(8), + Map.of("mapField1", Map.of("innerField", "butNotThis", "innerField2", Map.of("deeperInnerField", "matchThis"))), + randomApplicationPrivileges() + ); + waitForMigrationCompletion(adminClient(), SecurityMigrations.ROLE_METADATA_FLATTENED_MIGRATION_VERSION); + Consumer>> matcher = roles -> { + assertThat(roles, iterableWithSize(5)); + roles.sort(Comparator.comparing(o -> ((String) o.get("name")))); + assertRoleMap(roles.get(0), role1); + assertRoleMap(roles.get(1), role3); + assertRoleMap(roles.get(2), role5); + assertRoleMap(roles.get(3), role6); + assertRoleMap(roles.get(4), role8); + }; + assertQuery(""" + {"query":{"prefix":{"metadata":"match"}}}""", 5, matcher); + assertQuery(""" + {"query":{"simple_query_string":{"fields":["meta*"],"query":"matchThis"}}}""", 5, matcher); + } + + @SuppressWarnings("unchecked") + public void testSimpleSort() throws IOException { + // some other non-matching roles + int nOtherRoles = randomIntBetween(1, 5); + for (int i = 0; i < nOtherRoles; i++) { + createRandomRole(); + } + // some matching roles (at least 2, for sorting) + int nMatchingRoles = randomIntBetween(2, 5); + for (int i = 0; i < nMatchingRoles; i++) { + ApplicationResourcePrivileges[] applicationResourcePrivileges = randomArray( + 1, + 5, + ApplicationResourcePrivileges[]::new, + this::randomApplicationResourcePrivileges + ); + { + int matchingApplicationIndex = randomIntBetween(0, applicationResourcePrivileges.length - 1); + // make sure the "application" matches the filter query below ("a*9") + applicationResourcePrivileges[matchingApplicationIndex] = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("a" + randomAlphaOfLength(4) + "9") + .resources(applicationResourcePrivileges[matchingApplicationIndex].getResources()) + .privileges(applicationResourcePrivileges[matchingApplicationIndex].getPrivileges()) + .build(); + } + { + int matchingApplicationIndex = randomIntBetween(0, applicationResourcePrivileges.length - 1); + int matchingResourcesIndex = randomIntBetween( + 0, + applicationResourcePrivileges[matchingApplicationIndex].getResources().length - 1 + ); + // make sure the "resources" matches the terms query below ("99") + applicationResourcePrivileges[matchingApplicationIndex] = RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(applicationResourcePrivileges[matchingApplicationIndex].getApplication()) + .resources(applicationResourcePrivileges[matchingApplicationIndex].getResources()[matchingResourcesIndex] = "99") + .privileges(applicationResourcePrivileges[matchingApplicationIndex].getPrivileges()) + .build(); + } + createRole( + randomAlphaOfLength(4) + i, + randomBoolean() ? null : randomAlphaOfLength(8), + randomBoolean() ? 
null : randomMetadata(), + applicationResourcePrivileges + ); + } + assertQuery(""" + {"query":{"bool":{"filter":[{"wildcard":{"applications.application":"a*9"}}]}},"sort":["name"]}""", nMatchingRoles, roles -> { + assertThat(roles, iterableWithSize(nMatchingRoles)); + // assert sorting on name + for (int i = 0; i < nMatchingRoles; i++) { + assertThat(roles.get(i).get("_sort"), instanceOf(List.class)); + assertThat(((List) roles.get(i).get("_sort")), iterableWithSize(1)); + assertThat(((List) roles.get(i).get("_sort")).get(0), equalTo(roles.get(i).get("name"))); + } + // assert the ascending sort order + for (int i = 1; i < nMatchingRoles; i++) { + int compareNames = roles.get(i - 1).get("name").toString().compareTo(roles.get(i).get("name").toString()); + assertThat(compareNames < 0, is(true)); + } + }); + assertQuery( + """ + {"query":{"bool":{"must":[{"terms":{"applications.resources":["99"]}}]}},"sort":["applications.privileges"]}""", + nMatchingRoles, + roles -> { + assertThat(roles, iterableWithSize(nMatchingRoles)); + // assert sorting on best "applications.privileges" + for (int i = 0; i < nMatchingRoles; i++) { + assertThat(roles.get(i).get("_sort"), instanceOf(List.class)); + assertThat(((List) roles.get(i).get("_sort")), iterableWithSize(1)); + assertThat(((List) roles.get(i).get("_sort")).get(0), equalTo(getPrivilegeNameUsedForSorting(roles.get(i)))); + } + // assert the ascending sort order + for (int i = 1; i < nMatchingRoles; i++) { + int comparePrivileges = getPrivilegeNameUsedForSorting(roles.get(i - 1)).compareTo( + getPrivilegeNameUsedForSorting(roles.get(i)) + ); + assertThat(comparePrivileges < 0, is(true)); + } + } + ); + } + + @SuppressWarnings("unchecked") + public void testSortWithPagination() throws IOException { + int roleIdx = 0; + // some non-matching roles + int nOtherRoles = randomIntBetween(0, 5); + for (int i = 0; i < nOtherRoles; i++) { + createRole( + Strings.format("role_%03d", roleIdx++), + randomBoolean() ? null : randomDescription(), + randomBoolean() ? null : randomMetadata(), + randomApplicationPrivileges() + ); + } + // first matching role + RoleDescriptor firstMatchingRole = createRole( + Strings.format("role_%03d", roleIdx++), + "some ZZZZmatchZZZZ descr", + randomBoolean() ? null : randomMetadata(), + randomApplicationPrivileges() + ); + nOtherRoles = randomIntBetween(0, 5); + for (int i = 0; i < nOtherRoles; i++) { + createRole( + Strings.format("role_%03d", roleIdx++), + randomBoolean() ? null : randomDescription(), + randomBoolean() ? null : randomMetadata(), + randomApplicationPrivileges() + ); + } + // second matching role + RoleDescriptor secondMatchingRole = createRole( + Strings.format("role_%03d", roleIdx++), + "other ZZZZmatchZZZZ meh", + randomBoolean() ? null : randomMetadata(), + randomApplicationPrivileges() + ); + nOtherRoles = randomIntBetween(0, 5); + for (int i = 0; i < nOtherRoles; i++) { + createRole( + Strings.format("role_%03d", roleIdx++), + randomBoolean() ? null : randomDescription(), + randomBoolean() ? null : randomMetadata(), + randomApplicationPrivileges() + ); + } + // third matching role + RoleDescriptor thirdMatchingRole = createRole( + Strings.format("role_%03d", roleIdx++), + "me ZZZZmatchZZZZ go", + randomBoolean() ? null : randomMetadata(), + randomApplicationPrivileges() + ); + nOtherRoles = randomIntBetween(0, 5); + for (int i = 0; i < nOtherRoles; i++) { + createRole( + Strings.format("role_%03d", roleIdx++), + randomBoolean() ? null : randomDescription(), + randomBoolean() ? 
null : randomMetadata(),
+                randomApplicationPrivileges()
+            );
+        }
+        String queryTemplate = """
+            {"query":{"match":{"description":{"query":"ZZZZmatchZZZZ"}}},
+            "size":1,
+            "sort":[{"name":{"order":"desc"}},{"applications.resources":{"order":"asc"}}]
+            %s
+            }""";
+        AtomicReference<String> searchAfter = new AtomicReference<>("");
+        Consumer<Map<String, Object>> searchAfterChain = roleMap -> {
+            assertThat(roleMap.get("_sort"), instanceOf(List.class));
+            assertThat(((List<String>) roleMap.get("_sort")), iterableWithSize(2));
+            String firstSortValue = ((List<String>) roleMap.get("_sort")).get(0);
+            assertThat(firstSortValue, equalTo(roleMap.get("name")));
+            String secondSortValue = ((List<String>) roleMap.get("_sort")).get(1);
+            searchAfter.set(
+                ",\"search_after\":[\""
+                    + firstSortValue
+                    + "\","
+                    + (secondSortValue != null ? ("\"" + secondSortValue + "\"") : "null")
+                    + "]"
+            );
+        };
+        assertQuery(Strings.format(queryTemplate, searchAfter.get()), 3, roles -> {
+            assertThat(roles, iterableWithSize(1));
+            assertRoleMap(roles.get(0), thirdMatchingRole);
+            searchAfterChain.accept(roles.get(0));
+        });
+        assertQuery(Strings.format(queryTemplate, searchAfter.get()), 3, roles -> {
+            assertThat(roles, iterableWithSize(1));
+            assertRoleMap(roles.get(0), secondMatchingRole);
+            searchAfterChain.accept(roles.get(0));
+        });
+        assertQuery(Strings.format(queryTemplate, searchAfter.get()), 3, roles -> {
+            assertThat(roles, iterableWithSize(1));
+            assertRoleMap(roles.get(0), firstMatchingRole);
+            searchAfterChain.accept(roles.get(0));
+        });
+        // no more results
+        assertQuery(Strings.format(queryTemplate, searchAfter.get()), 3, roles -> assertThat(roles, emptyIterable()));
+    }
+
+    @SuppressWarnings("unchecked")
+    private String getPrivilegeNameUsedForSorting(Map<String, Object> roleMap) {
+        String bestPrivilege = null;
+        List<Map<String, Object>> applications = (List<Map<String, Object>>) roleMap.get("applications");
+        if (applications == null) {
+            return bestPrivilege;
+        }
+        for (Map<String, Object> application : applications) {
+            List<String> privileges = (List<String>) application.get("privileges");
+            if (privileges != null) {
+                for (String privilege : privileges) {
+                    if (bestPrivilege == null) {
+                        bestPrivilege = privilege;
+                    } else if (privilege.compareTo(bestPrivilege) < 0) {
+                        bestPrivilege = privilege;
+                    }
+                }
+            }
+        }
+        return bestPrivilege;
+    }
+
+    private RoleDescriptor createRandomRole() throws IOException {
+        return createRole(
+            randomUUID(),
+            randomBoolean() ? null : randomDescription(),
+            randomBoolean() ? null : randomMetadata(),
+            randomApplicationPrivileges()
+        );
+    }
+
+    private ApplicationResourcePrivileges[] randomApplicationPrivileges() {
+        ApplicationResourcePrivileges[] applicationResourcePrivileges = randomArray(
+            0,
+            3,
+            ApplicationResourcePrivileges[]::new,
+            this::randomApplicationResourcePrivileges
+        );
+        return applicationResourcePrivileges.length == 0 && randomBoolean() ? null : applicationResourcePrivileges;
+    }
+
+    @SuppressWarnings("unchecked")
+    private RoleDescriptor createRole(
+        String roleName,
+        String description,
+        Map<String, Object> metadata,
+        ApplicationResourcePrivileges... applicationResourcePrivileges
+    ) throws IOException {
+        Request request = new Request("POST", "/_security/role/" + roleName);
+        Map<String, Object> requestMap = new HashMap<>();
+        if (description != null) {
+            requestMap.put(RoleDescriptor.Fields.DESCRIPTION.getPreferredName(), description);
+        }
+        if (metadata != null) {
+            requestMap.put(RoleDescriptor.Fields.METADATA.getPreferredName(), metadata);
+        }
+        if (applicationResourcePrivileges != null) {
+            requestMap.put(RoleDescriptor.Fields.APPLICATIONS.getPreferredName(), applicationResourcePrivileges);
+        }
+        BytesReference source = BytesReference.bytes(jsonBuilder().map(requestMap));
+        request.setJsonEntity(source.utf8ToString());
+        Response response = adminClient().performRequest(request);
+        assertOK(response);
+        Map<String, Object> responseMap = responseAsMap(response);
+        assertTrue((Boolean) ((Map<String, Object>) responseMap.get("role")).get("created"));
+        return new RoleDescriptor(
+            roleName,
+            null,
+            null,
+            applicationResourcePrivileges,
+            null,
+            null,
+            metadata,
+            null,
+            null,
+            null,
+            null,
+            description
+        );
+    }
+
+    private void assertQuery(String body, int total, Consumer<List<Map<String, Object>>> roleVerifier) throws IOException {
+        assertQuery(client(), body, total, roleVerifier);
+    }
+
+    private static Request queryRoleRequestWithAuth() {
+        Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role");
+        request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER));
+        return request;
+    }
+
+    public static void assertQuery(RestClient client, String body, int total, Consumer<List<Map<String, Object>>> roleVerifier)
+        throws IOException {
+        Request request = queryRoleRequestWithAuth();
+        request.setJsonEntity(body);
+        Response response = client.performRequest(request);
+        assertOK(response);
+        Map<String, Object> responseMap = responseAsMap(response);
+        assertThat(responseMap.get("total"), is(total));
+        @SuppressWarnings("unchecked")
+        List<Map<String, Object>> roles = new ArrayList<>((List<Map<String, Object>>) responseMap.get("roles"));
+        assertThat(roles.size(), is(responseMap.get("count")));
+        roleVerifier.accept(roles);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void assertRoleMap(Map<String, Object> roleMap, RoleDescriptor roleDescriptor) {
+        assertThat(roleMap.get("name"), equalTo(roleDescriptor.getName()));
+        if (Strings.isNullOrEmpty(roleDescriptor.getDescription())) {
+            assertThat(roleMap.get("description"), nullValue());
+        } else {
+            assertThat(roleMap.get("description"), equalTo(roleDescriptor.getDescription()));
+        }
+        // "applications" is always present
+        assertThat(roleMap.get("applications"), instanceOf(Iterable.class));
+        if (roleDescriptor.getApplicationPrivileges().length == 0) {
+            assertThat((Iterable<?>) roleMap.get("applications"), emptyIterable());
+        } else {
+            assertThat(
+                (Iterable<Map<String, Object>>) roleMap.get("applications"),
+                iterableWithSize(roleDescriptor.getApplicationPrivileges().length)
+            );
+            Iterator<Map<String, Object>> responseIterator = ((Iterable<Map<String, Object>>) roleMap.get("applications")).iterator();
+            Iterator<ApplicationResourcePrivileges> descriptorIterator = Arrays.asList(roleDescriptor.getApplicationPrivileges())
+                .iterator();
+            while (responseIterator.hasNext()) {
+                assertTrue(descriptorIterator.hasNext());
+                Map<String, Object> responsePrivilege = responseIterator.next();
+                ApplicationResourcePrivileges descriptorPrivilege = descriptorIterator.next();
+                assertThat(responsePrivilege.get("application"), equalTo(descriptorPrivilege.getApplication()));
+                assertThat(responsePrivilege.get("privileges"), equalTo(Arrays.asList(descriptorPrivilege.getPrivileges())));
+                assertThat(responsePrivilege.get("resources"), equalTo(Arrays.asList(descriptorPrivilege.getResources())));
+            }
+            assertFalse(descriptorIterator.hasNext());
+        }
+        // in this test suite all roles are always enabled
+        assertTrue(roleMap.containsKey("transient_metadata"));
+        assertThat(roleMap.get("transient_metadata"), Matchers.instanceOf(Map.class));
+        assertThat(((Map<String, Object>) roleMap.get("transient_metadata")).get("enabled"), equalTo(true));
+    }
+
+    private Map<String, Object> randomMetadata() {
+        return randomMetadata(3);
+    }
+
+    private Map<String, Object> randomMetadata(int maxLevel) {
+        int size = randomIntBetween(0, 5);
+        Map<String, Object> metadata = new HashMap<>(size);
+        for (int i = 0; i < size; i++) {
+            switch (randomFrom(1, 2, 3, 4, 5)) {
+                case 1:
+                    metadata.put(randomAlphaOfLength(4), randomAlphaOfLength(4));
+                    break;
+                case 2:
+                    metadata.put(randomAlphaOfLength(4), randomInt());
+                    break;
+                case 3:
+                    metadata.put(randomAlphaOfLength(4), randomList(0, 3, () -> randomAlphaOfLength(4)));
+                    break;
+                case 4:
+                    metadata.put(randomAlphaOfLength(4), randomList(0, 3, () -> randomInt(4)));
+                    break;
+                case 5:
+                    if (maxLevel > 0) {
+                        metadata.put(randomAlphaOfLength(4), randomMetadata(maxLevel - 1));
+                    }
+                    break;
+            }
+        }
+        return metadata;
+    }
+
+    private ApplicationResourcePrivileges randomApplicationResourcePrivileges() {
+        String applicationName;
+        if (randomBoolean()) {
+            applicationName = "*";
+        } else {
+            applicationName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 10);
+        }
+        Supplier<String> privilegeNameSupplier = () -> randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 8);
+        int size = randomIntBetween(1, 5);
+        List<String> resources = new ArrayList<>(size);
+        for (int i = 0; i < size; i++) {
+            if (randomBoolean()) {
+                String suffix = randomBoolean() ? "*" : randomAlphaOfLengthBetween(4, 9);
+                resources.add(randomAlphaOfLengthBetween(2, 5) + "/" + suffix);
+            } else {
+                resources.add(randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 8));
+            }
+        }
+        return RoleDescriptor.ApplicationResourcePrivileges.builder()
+            .application(applicationName)
+            .resources(resources)
+            .privileges(randomList(1, 3, privilegeNameSupplier))
+            .build();
+    }
+
+    private String randomDescription() {
+        StringBuilder randomDescriptionBuilder = new StringBuilder();
+        int nParts = randomIntBetween(1, 5);
+        for (int i = 0; i < nParts; i++) {
+            randomDescriptionBuilder.append(randomAlphaOfLengthBetween(1, 5));
+        }
+        return randomDescriptionBuilder.toString();
+    }
+
+    @SuppressWarnings("unchecked")
+    public static void waitForMigrationCompletion(RestClient adminClient, Integer migrationVersion) throws Exception {
+        final Request request = new Request("GET", "_cluster/state/metadata/" + INTERNAL_SECURITY_MAIN_INDEX_7);
+        assertBusy(() -> {
+            Response response = adminClient.performRequest(request);
+            assertOK(response);
+            Map<String, Object> responseMap = responseAsMap(response);
+            Map<String, Object> indicesMetadataMap = (Map<String, Object>) ((Map<String, Object>) responseMap.get("metadata")).get(
+                "indices"
+            );
+            assertTrue(indicesMetadataMap.containsKey(INTERNAL_SECURITY_MAIN_INDEX_7));
+            assertTrue(
+                ((Map<String, Object>) indicesMetadataMap.get(INTERNAL_SECURITY_MAIN_INDEX_7)).containsKey(MIGRATION_VERSION_CUSTOM_KEY)
+            );
+            if (migrationVersion != null) {
+                assertTrue(
+                    ((Map<String, Object>) ((Map<String, Object>) indicesMetadataMap.get(INTERNAL_SECURITY_MAIN_INDEX_7)).get(
+                        MIGRATION_VERSION_CUSTOM_KEY
+                    )).containsKey(MIGRATION_VERSION_CUSTOM_DATA_KEY)
+                );
+                Integer versionInteger = Integer.parseInt(
+                    (String) ((Map<String, Object>) ((Map<String, Object>) indicesMetadataMap.get(INTERNAL_SECURITY_MAIN_INDEX_7)).get(
+                        MIGRATION_VERSION_CUSTOM_KEY
+
)).get(MIGRATION_VERSION_CUSTOM_DATA_KEY) + ); + assertThat(versionInteger, greaterThanOrEqualTo(migrationVersion)); + } + }); + } +} diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java index 223c07a1e9dec..56af8aa16360a 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java @@ -35,7 +35,7 @@ public class QueryUserIT extends SecurityInBasicRestTestCase { - private static final String READ_USERS_USER_AUTH_HEADER = "Basic cmVhZF91c2Vyc191c2VyOnJlYWQtdXNlcnMtcGFzc3dvcmQ="; + private static final String READ_SECURITY_USER_AUTH_HEADER = "Basic cmVhZF9zZWN1cml0eV91c2VyOnJlYWQtc2VjdXJpdHktcGFzc3dvcmQ="; private static final String TEST_USER_NO_READ_USERS_AUTH_HEADER = "Basic c2VjdXJpdHlfdGVzdF91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; private static final Set reservedUsers = Set.of( @@ -57,7 +57,7 @@ private Request queryUserRequestWithAuth(boolean withProfileId) { randomFrom("POST", "GET"), "/_security/_query/user" + (withProfileId ? "?with_profile_uid=true" : randomFrom("", "?with_profile_uid=false")) ); - request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_USERS_USER_AUTH_HEADER)); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER)); return request; } @@ -321,7 +321,7 @@ public void testSort() throws IOException { final String invalidSortName = randomFrom("email", "full_name"); assertQueryError( - READ_USERS_USER_AUTH_HEADER, + READ_SECURITY_USER_AUTH_HEADER, 400, Strings.format("{\"sort\":[\"%s\"]}", invalidSortName), Strings.format("sorting is not supported for field [%s]", invalidSortName) @@ -338,7 +338,7 @@ private String getReservedUsernameAndAssertExists() throws IOException { putUserRequest.setJsonEntity("{\"enabled\": true}"); } - request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_USERS_USER_AUTH_HEADER)); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_SECURITY_USER_AUTH_HEADER)); final Response response = client().performRequest(request); assertOK(response); final Map responseMap = responseAsMap(response); @@ -363,7 +363,7 @@ private List> collectUsers(Request request, int total) throw } private void assertQueryError(int statusCode, String body) { - assertQueryError(READ_USERS_USER_AUTH_HEADER, statusCode, body); + assertQueryError(READ_SECURITY_USER_AUTH_HEADER, statusCode, body); } private void assertQueryError(String authHeader, int statusCode, String body) { diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java index b9dfa4c7fcfd7..7cb8c09545bb1 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java @@ -23,8 +23,8 @@ public abstract class SecurityInBasicRestTestCase extends 
ESRestTestCase { private static final SecureString REST_PASSWORD = new SecureString("security-test-password".toCharArray()); protected static final String TEST_USER_AUTH_HEADER = "Basic c2VjdXJpdHlfdGVzdF91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; - protected static final String READ_USERS_USER = "read_users_user"; - private static final SecureString READ_USERS_PASSWORD = new SecureString("read-users-password".toCharArray()); + protected static final String READ_SECURITY_USER = "read_security_user"; + private static final SecureString READ_SECURITY_PASSWORD = new SecureString("read-security-password".toCharArray()); private static final String ADMIN_USER = "admin_user"; private static final SecureString ADMIN_PASSWORD = new SecureString("admin-password".toCharArray()); @@ -53,7 +53,7 @@ public abstract class SecurityInBasicRestTestCase extends ESRestTestCase { .user(REST_USER, REST_PASSWORD.toString(), "security_test_role", false) .user(API_KEY_USER, API_KEY_USER_PASSWORD.toString(), "api_key_user_role", false) .user(API_KEY_ADMIN_USER, API_KEY_ADMIN_USER_PASSWORD.toString(), "api_key_admin_role", false) - .user(READ_USERS_USER, READ_USERS_PASSWORD.toString(), "read_users_user_role", false) + .user(READ_SECURITY_USER, READ_SECURITY_PASSWORD.toString(), "read_security_user_role", false) .build(); @Override @@ -72,5 +72,4 @@ protected Settings restClientSettings() { String token = basicAuthHeaderValue(REST_USER, REST_PASSWORD); return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } - } diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml index 15c291274bcdb..ee7d8f7c6214f 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml @@ -18,8 +18,8 @@ api_key_user_role: cluster: - manage_own_api_key -# Used to perform query user operations -read_users_user_role: +# Used to perform query user and role operations +read_security_user_role: cluster: - read_security diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index a38710332313f..d5099729c52b3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -259,6 +259,7 @@ import org.elasticsearch.xpack.security.action.role.TransportDeleteRoleAction; import org.elasticsearch.xpack.security.action.role.TransportGetRolesAction; import org.elasticsearch.xpack.security.action.role.TransportPutRoleAction; +import org.elasticsearch.xpack.security.action.role.TransportQueryRoleAction; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.elasticsearch.xpack.security.action.rolemapping.TransportDeleteRoleMappingAction; import org.elasticsearch.xpack.security.action.rolemapping.TransportGetRoleMappingsAction; @@ -377,6 +378,7 @@ import org.elasticsearch.xpack.security.rest.action.role.RestDeleteRoleAction; import org.elasticsearch.xpack.security.rest.action.role.RestGetRolesAction; import org.elasticsearch.xpack.security.rest.action.role.RestPutRoleAction; +import org.elasticsearch.xpack.security.rest.action.role.RestQueryRoleAction; import 
org.elasticsearch.xpack.security.rest.action.rolemapping.RestDeleteRoleMappingAction; import org.elasticsearch.xpack.security.rest.action.rolemapping.RestGetRoleMappingsAction; import org.elasticsearch.xpack.security.rest.action.rolemapping.RestPutRoleMappingAction; @@ -1535,6 +1537,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(PutUserAction.INSTANCE, TransportPutUserAction.class), new ActionHandler<>(DeleteUserAction.INSTANCE, TransportDeleteUserAction.class), new ActionHandler<>(GetRolesAction.INSTANCE, TransportGetRolesAction.class), + new ActionHandler<>(ActionTypes.QUERY_ROLE_ACTION, TransportQueryRoleAction.class), new ActionHandler<>(PutRoleAction.INSTANCE, TransportPutRoleAction.class), new ActionHandler<>(ActionTypes.BULK_PUT_ROLES, TransportBulkPutRolesAction.class), new ActionHandler<>(DeleteRoleAction.INSTANCE, TransportDeleteRoleAction.class), @@ -1630,6 +1633,7 @@ public List getRestHandlers( new RestPutUserAction(settings, getLicenseState()), new RestDeleteUserAction(settings, getLicenseState()), new RestGetRolesAction(settings, getLicenseState()), + new RestQueryRoleAction(settings, getLicenseState()), new RestBulkPutRolesAction(settings, getLicenseState(), bulkPutRoleRequestBuilderFactory.get()), new RestPutRoleAction(settings, getLicenseState(), putRoleRequestBuilderFactory.get()), new RestDeleteRoleAction(settings, getLicenseState()), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportQueryRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportQueryRoleAction.java new file mode 100644 index 0000000000000..d6bdfea29a676 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportQueryRoleAction.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.role; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.role.QueryRoleRequest; +import org.elasticsearch.xpack.core.security.action.role.QueryRoleResponse; +import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; +import org.elasticsearch.xpack.security.support.FieldNameTranslators; +import org.elasticsearch.xpack.security.support.RoleBoolQueryBuilder; + +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.xpack.security.support.FieldNameTranslators.ROLE_FIELD_NAME_TRANSLATORS; + +public class TransportQueryRoleAction extends TransportAction { + + public static final String ROLE_NAME_RUNTIME_MAPPING_FIELD = "runtime_role_name"; + private static final Map ROLE_NAME_RUNTIME_MAPPING = Map.of( + ROLE_NAME_RUNTIME_MAPPING_FIELD, + Map.of("type", "keyword", "script", Map.of("source", "emit(params._fields['_id'].value.substring(5));")) + ); + + private final NativeRolesStore nativeRolesStore; + + @Inject + public TransportQueryRoleAction(ActionFilters actionFilters, NativeRolesStore nativeRolesStore, TransportService transportService) { + super(ActionTypes.QUERY_ROLE_ACTION.name(), actionFilters, transportService.getTaskManager()); + this.nativeRolesStore = nativeRolesStore; + } + + @Override + protected void doExecute(Task task, QueryRoleRequest request, ActionListener listener) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource().version(false).fetchSource(true).trackTotalHits(true); + if (request.getFrom() != null) { + searchSourceBuilder.from(request.getFrom()); + } + if (request.getSize() != null) { + searchSourceBuilder.size(request.getSize()); + } + if (request.getSearchAfterBuilder() != null) { + searchSourceBuilder.searchAfter(request.getSearchAfterBuilder().getSortValues()); + } + AtomicBoolean accessesMetadata = new AtomicBoolean(false); + AtomicBoolean accessesRoleName = new AtomicBoolean(false); + searchSourceBuilder.query(RoleBoolQueryBuilder.build(request.getQueryBuilder(), indexFieldName -> { + if (indexFieldName.startsWith(FieldNameTranslators.FLATTENED_METADATA_INDEX_FIELD_NAME)) { + accessesMetadata.set(true); + } else if (indexFieldName.equals(ROLE_NAME_RUNTIME_MAPPING_FIELD)) { + accessesRoleName.set(true); + } + })); + if (request.getFieldSortBuilders() != null) { + ROLE_FIELD_NAME_TRANSLATORS.translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder, indexFieldName -> { + if (indexFieldName.startsWith(FieldNameTranslators.FLATTENED_METADATA_INDEX_FIELD_NAME)) { + accessesMetadata.set(true); + } else if (indexFieldName.equals(ROLE_NAME_RUNTIME_MAPPING_FIELD)) { + accessesRoleName.set(true); + } + }); + } + if (accessesMetadata.get() && nativeRolesStore.isMetadataSearchable() == false) { + listener.onFailure( + new ElasticsearchStatusException( + "Cannot query or sort role metadata until automatic migration completed", + RestStatus.SERVICE_UNAVAILABLE + ) + ); + return; + } + // 
only add the query-level runtime field to the search request if it actually refers to the role name + if (accessesRoleName.get()) { + searchSourceBuilder.runtimeMappings(ROLE_NAME_RUNTIME_MAPPING); + } + nativeRolesStore.queryRoleDescriptors( + searchSourceBuilder, + ActionListener.wrap( + queryRoleResults -> listener.onResponse(new QueryRoleResponse(queryRoleResults.total(), queryRoleResults.items())), + listener::onFailure + ) + ); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index b4afc82ff1816..00714dd3b024f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -25,8 +25,8 @@ import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.MultiSearchResponse; -import org.elasticsearch.action.search.MultiSearchResponse.Item; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -42,6 +42,8 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -52,6 +54,8 @@ import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheResponse; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; +import org.elasticsearch.xpack.core.security.action.role.QueryRoleResponse; +import org.elasticsearch.xpack.core.security.action.role.QueryRoleResponse.QueryRoleResult; import org.elasticsearch.xpack.core.security.action.role.RoleDescriptorRequestValidator; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; @@ -87,6 +91,7 @@ import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ROLE_TYPE; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.PRIMARY_SHARDS; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.Availability.SEARCH_SHARDS; +import static org.elasticsearch.xpack.security.support.SecurityMigrations.ROLE_METADATA_FLATTENED_MIGRATION_VERSION; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED; @@ -244,6 +249,54 @@ public void getRoleDescriptors(Set names, final ActionListener listener) { + SearchRequest searchRequest = new SearchRequest(new String[] { SECURITY_MAIN_ALIAS }, searchSourceBuilder); + SecurityIndexManager frozenSecurityIndex = securityIndex.defensiveCopy(); + if (frozenSecurityIndex.indexExists() == false) { +
logger.debug("security index does not exist"); + listener.onResponse(QueryRoleResult.EMPTY); + } else if (frozenSecurityIndex.isAvailable(SEARCH_SHARDS) == false) { + listener.onFailure(frozenSecurityIndex.getUnavailableReason(SEARCH_SHARDS)); + } else { + securityIndex.checkIndexVersionThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( + client, + SECURITY_ORIGIN, + TransportSearchAction.TYPE, + searchRequest, + ActionListener.wrap(searchResponse -> { + long total = searchResponse.getHits().getTotalHits().value; + if (total == 0) { + logger.debug("No roles found for query [{}]", searchRequest.source().query()); + listener.onResponse(QueryRoleResult.EMPTY); + return; + } + SearchHit[] hits = searchResponse.getHits().getHits(); + List items = Arrays.stream(hits).map(hit -> { + RoleDescriptor roleDescriptor = transformRole(hit.getId(), hit.getSourceRef(), logger, licenseState); + if (roleDescriptor == null) { + return null; + } + return new QueryRoleResponse.Item(roleDescriptor, hit.getSortValues()); + }).filter(Objects::nonNull).toList(); + listener.onResponse(new QueryRoleResult(total, items)); + }, listener::onFailure) + ) + ); + } + } + public void deleteRole(final DeleteRoleRequest deleteRoleRequest, final ActionListener listener) { if (enabled == false) { listener.onFailure(new IllegalStateException("Native role management is disabled")); @@ -551,7 +604,7 @@ public void usageStats(ActionListener> listener) { new DelegatingActionListener>(listener) { @Override public void onResponse(MultiSearchResponse items) { - Item[] responses = items.getResponses(); + MultiSearchResponse.Item[] responses = items.getResponses(); if (responses[0].isFailure()) { usageStats.put("size", 0); } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestQueryRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestQueryRoleAction.java new file mode 100644 index 0000000000000..c2dc7166bd3b6 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestQueryRoleAction.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.rest.action.role; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.role.QueryRoleRequest; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.index.query.AbstractQueryBuilder.parseTopLevelQuery; +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public final class RestQueryRoleAction extends NativeRoleBaseRestHandler { + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "query_role_request_payload", + a -> new Payload((QueryBuilder) a[0], (Integer) a[1], (Integer) a[2], (List) a[3], (SearchAfterBuilder) a[4]) + ); + + static { + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseTopLevelQuery(p), new ParseField("query")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("from")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("size")); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return new FieldSortBuilder(p.text()); + } else if (p.currentToken() == XContentParser.Token.START_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p); + final FieldSortBuilder fieldSortBuilder = FieldSortBuilder.fromXContent(p, p.currentName()); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p); + return fieldSortBuilder; + } else { + throw new IllegalArgumentException("malformed sort object"); + } + }, new ParseField("sort")); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> SearchAfterBuilder.fromXContent(p), + new ParseField("search_after"), + ObjectParser.ValueType.VALUE_ARRAY + ); + } + + public RestQueryRoleAction(Settings settings, XPackLicenseState licenseState) { + super(settings, licenseState); + } + + @Override + public String getName() { + return "xpack_security_query_role"; + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_security/_query/role"), new Route(POST, "/_security/_query/role")); + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + final QueryRoleRequest queryRoleRequest; + if (request.hasContentOrSourceParam()) { + RestQueryRoleAction.Payload payload = PARSER.parse(request.contentOrSourceParamParser(), null); + queryRoleRequest = new QueryRoleRequest( + payload.queryBuilder, + payload.from, + payload.size, + 
payload.fieldSortBuilders, + payload.searchAfterBuilder + ); + } else { + queryRoleRequest = new QueryRoleRequest(null, null, null, null, null); + } + return channel -> client.execute(ActionTypes.QUERY_ROLE_ACTION, queryRoleRequest, new RestToXContentListener<>(channel)); + } + + private record Payload( + @Nullable QueryBuilder queryBuilder, + @Nullable Integer from, + @Nullable Integer size, + @Nullable List fieldSortBuilders, + @Nullable SearchAfterBuilder searchAfterBuilder + ) {} +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/FieldNameTranslators.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/FieldNameTranslators.java index 6d0b076fd9bf1..31689f22608c6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/FieldNameTranslators.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/FieldNameTranslators.java @@ -38,9 +38,12 @@ import java.util.function.Consumer; import static org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD; +import static org.elasticsearch.xpack.security.action.role.TransportQueryRoleAction.ROLE_NAME_RUNTIME_MAPPING_FIELD; public final class FieldNameTranslators { + public static final String FLATTENED_METADATA_INDEX_FIELD_NAME = "metadata_flattened"; + public static final FieldNameTranslators API_KEY_FIELD_NAME_TRANSLATORS = new FieldNameTranslators( List.of( new SimpleFieldNameTranslator("creator.principal", "username"), @@ -53,7 +56,7 @@ public final class FieldNameTranslators { new SimpleFieldNameTranslator("invalidation_time", "invalidation"), // allows querying on any non-wildcard sub-fields under the "metadata." prefix // also allows querying on the "metadata" field itself (including by specifying patterns) - new FlattenedFieldNameTranslator("metadata_flattened", "metadata") + new FlattenedFieldNameTranslator(FLATTENED_METADATA_INDEX_FIELD_NAME, "metadata") ) ); @@ -68,6 +71,19 @@ public final class FieldNameTranslators { ) ); + public static final FieldNameTranslators ROLE_FIELD_NAME_TRANSLATORS = new FieldNameTranslators( + List.of( + new SimpleFieldNameTranslator(ROLE_NAME_RUNTIME_MAPPING_FIELD, "name"), + idemFieldNameTranslator("description"), + idemFieldNameTranslator("applications.application"), + idemFieldNameTranslator("applications.resources"), + idemFieldNameTranslator("applications.privileges"), + // allows querying on any non-wildcard sub-fields under the "metadata." prefix + // also allows querying on the "metadata" field itself (including by specifying patterns) + new FlattenedFieldNameTranslator(FLATTENED_METADATA_INDEX_FIELD_NAME, "metadata") + ) + ); + private final List fieldNameTranslators; private FieldNameTranslators(List fieldNameTranslators) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RoleBoolQueryBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RoleBoolQueryBuilder.java new file mode 100644 index 0000000000000..1d5b93fbb1917 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/RoleBoolQueryBuilder.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.support; + +import org.apache.lucene.search.Query; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; + +import java.io.IOException; +import java.util.Set; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.security.support.FieldNameTranslators.ROLE_FIELD_NAME_TRANSLATORS; + +public class RoleBoolQueryBuilder extends BoolQueryBuilder { + + // Field names allowed at the index level + private static final Set FIELDS_ALLOWED_TO_QUERY = Set.of("_id", "type"); + + private RoleBoolQueryBuilder() {} + + /** + * Build a bool query that is specialised for querying roles from the security index. + * The method processes the given QueryBuilder to ensure: + * * Only fields from an allowlist are queried + * * Only query types from an allowlist are used + * * Field names used in the Query DSL get translated into corresponding names used at the index level. + * * No other types of documents stored in the same security index are exposed + * + * @param queryBuilder This represents the query parsed directly from the user input. It is validated + * and transformed (see above). + * @param fieldNameVisitor This {@code Consumer} is invoked with all the (index-level) field names referred to in the passed-in query. + * @return A specialised query builder for roles that is safe to run on the security index. + */ + public static RoleBoolQueryBuilder build(QueryBuilder queryBuilder, @Nullable Consumer fieldNameVisitor) { + final RoleBoolQueryBuilder finalQuery = new RoleBoolQueryBuilder(); + if (queryBuilder != null) { + QueryBuilder processedQuery = ROLE_FIELD_NAME_TRANSLATORS.translateQueryBuilderFields(queryBuilder, fieldNameVisitor); + finalQuery.must(processedQuery); + } + finalQuery.filter(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), RoleDescriptor.ROLE_TYPE)); + if (fieldNameVisitor != null) { + fieldNameVisitor.accept(RoleDescriptor.Fields.TYPE.getPreferredName()); + } + return finalQuery; + } + + @Override + protected Query doToQuery(SearchExecutionContext context) throws IOException { + context.setAllowedFields(RoleBoolQueryBuilder::isIndexFieldNameAllowed); + return super.doToQuery(context); + } + + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + if (queryRewriteContext instanceof SearchExecutionContext) { + ((SearchExecutionContext) queryRewriteContext).setAllowedFields(RoleBoolQueryBuilder::isIndexFieldNameAllowed); + } + return super.doRewrite(queryRewriteContext); + } + + static boolean isIndexFieldNameAllowed(String fieldName) { + return FIELDS_ALLOWED_TO_QUERY.contains(fieldName) || ROLE_FIELD_NAME_TRANSLATORS.isIndexFieldSupported(fieldName); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index 9c15356d1433d..61314e14c8bec 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -187,6 +187,14 @@ public boolean isStateRecovered() { return this.state != State.UNRECOVERED_STATE; } + public boolean isMigrationsVersionAtLeast(Integer expectedMigrationsVersion) { + return indexExists() && this.state.migrationsVersion.compareTo(expectedMigrationsVersion) >= 0; + } + + public boolean isCreatedOnLatestVersion() { + return this.state.createdOnLatestVersion; + } + public ElasticsearchException getUnavailableReason(Availability availability) { // ensure usage of a local copy so all checks execute against the same state! if (defensiveCopy == false) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index f7ca72cd89eba..5ec76a8dc3d01 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -61,55 +61,64 @@ public interface SecurityMigration { int minMappingVersion(); } - public static final TreeMap MIGRATIONS_BY_VERSION = new TreeMap<>(Map.of(1, new SecurityMigration() { - private static final Logger logger = LogManager.getLogger(SecurityMigration.class); + public static final Integer ROLE_METADATA_FLATTENED_MIGRATION_VERSION = 1; - @Override - public void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener) { - BoolQueryBuilder filterQuery = new BoolQueryBuilder().filter(QueryBuilders.termQuery("type", "role")) - .mustNot(QueryBuilders.existsQuery("metadata_flattened")); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(filterQuery).size(0).trackTotalHits(true); - SearchRequest countRequest = new SearchRequest(indexManager.getConcreteIndexName()); - countRequest.source(searchSourceBuilder); + public static final TreeMap MIGRATIONS_BY_VERSION = new TreeMap<>( + Map.of(ROLE_METADATA_FLATTENED_MIGRATION_VERSION, new SecurityMigration() { + private static final Logger logger = LogManager.getLogger(SecurityMigration.class); - client.search(countRequest, ActionListener.wrap(response -> { - // If there are no roles, skip migration - if (response.getHits().getTotalHits().value > 0) { - logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); - updateRolesByQuery(indexManager, client, filterQuery, listener); - } else { - listener.onResponse(null); - } - }, listener::onFailure)); - } + @Override + public void migrate(SecurityIndexManager indexManager, Client client, ActionListener listener) { + BoolQueryBuilder filterQuery = new BoolQueryBuilder().filter(QueryBuilders.termQuery("type", "role")) + .mustNot(QueryBuilders.existsQuery("metadata_flattened")); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(filterQuery).size(0).trackTotalHits(true); + SearchRequest countRequest = new SearchRequest(indexManager.getConcreteIndexName()); + countRequest.source(searchSourceBuilder); - private void updateRolesByQuery( - SecurityIndexManager indexManager, - Client client, - BoolQueryBuilder filterQuery, - ActionListener listener - ) { - UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(indexManager.getConcreteIndexName()); - updateByQueryRequest.setQuery(filterQuery); - updateByQueryRequest.setScript( - new 
Script(ScriptType.INLINE, "painless", "ctx._source.metadata_flattened = ctx._source.metadata", Collections.emptyMap()) - ); - client.admin() - .cluster() - .execute(UpdateByQueryAction.INSTANCE, updateByQueryRequest, ActionListener.wrap(bulkByScrollResponse -> { - logger.info("Migrated [" + bulkByScrollResponse.getTotal() + "] roles"); - listener.onResponse(null); + client.search(countRequest, ActionListener.wrap(response -> { + // If there are no roles, skip migration + if (response.getHits().getTotalHits().value > 0) { + logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); + updateRolesByQuery(indexManager, client, filterQuery, listener); + } else { + listener.onResponse(null); + } }, listener::onFailure)); - } + } - @Override - public Set nodeFeaturesRequired() { - return Set.of(SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED); - } + private void updateRolesByQuery( + SecurityIndexManager indexManager, + Client client, + BoolQueryBuilder filterQuery, + ActionListener listener + ) { + UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(indexManager.getConcreteIndexName()); + updateByQueryRequest.setQuery(filterQuery); + updateByQueryRequest.setScript( + new Script( + ScriptType.INLINE, + "painless", + "ctx._source.metadata_flattened = ctx._source.metadata", + Collections.emptyMap() + ) + ); + client.admin() + .cluster() + .execute(UpdateByQueryAction.INSTANCE, updateByQueryRequest, ActionListener.wrap(bulkByScrollResponse -> { + logger.info("Migrated [" + bulkByScrollResponse.getTotal() + "] roles"); + listener.onResponse(null); + }, listener::onFailure)); + } - @Override - public int minMappingVersion() { - return ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS.id(); - } - })); + @Override + public Set nodeFeaturesRequired() { + return Set.of(SecuritySystemIndices.SECURITY_ROLES_METADATA_FLATTENED); + } + + @Override + public int minMappingVersion() { + return ADD_REMOTE_CLUSTER_AND_DESCRIPTION_FIELDS.id(); + } + }) + ); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml index db4ea4e8b205d..50c26394efbf2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml @@ -87,6 +87,20 @@ teardown: - match: { admin_role.indices.0.names.0: "*" } - match: { admin_role.indices.0.privileges.0: "all" } + # query match_all roles + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.query_role: + body: > + { + "query": { "match_all": {} }, "sort": ["name"] + } + - match: { total: 2 } + - match: { count: 2 } + - match: { roles.0.name: "admin_role" } + - match: { roles.1.name: "backwards_role" } + - do: security.put_role: name: "role_with_description" @@ -104,3 +118,16 @@ teardown: name: "role_with_description" - match: { role_with_description.cluster.0: "manage_security" } - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." 
} + + # query again for this last role + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.query_role: + body: > + { + "query": { "match_all": {} }, "sort": ["name"], "from": 2 + } + - match: { total: 3 } + - match: { count: 1 } + - match: { roles.0.name: "role_with_description" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml index cc60b68069195..1b5ce381319d2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/50_remote_only.yml @@ -71,6 +71,22 @@ teardown: - match: { remote_role.remote_cluster.0.clusters.0: "my_remote*" } - match: { remote_role.remote_cluster.0.clusters.1: "my_remote2*" } + # the query roles endpoint also shows the same role info + - do: + security.query_role: + body: > + {} + - match: { total: 1 } + - match: { count: 1 } + - match: { roles.0.name: "remote_role" } + - match: { roles.0.remote_indices.0.names.0: "logs*" } + - match: { roles.0.remote_indices.0.privileges.0: "read" } + - match: { roles.0.remote_indices.0.allow_restricted_indices: false } + - match: { roles.0.remote_indices.0.clusters.0: "*" } + - match: { roles.0.remote_cluster.0.privileges.0: "monitor_enrich" } + - match: { roles.0.remote_cluster.0.clusters.0: "my_remote*" } + - match: { roles.0.remote_cluster.0.clusters.1: "my_remote2*" } + - do: headers: Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java index c7c51a2a96c87..d7cab65df1ac9 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java @@ -33,6 +33,7 @@ public abstract class AbstractUpgradeTestCase extends ESRestTestCase { ); protected static final String UPGRADE_FROM_VERSION = System.getProperty("tests.upgrade_from_version"); + protected static final boolean FIRST_MIXED_ROUND = Boolean.parseBoolean(System.getProperty("tests.first_round", "false")); protected static final boolean SKIP_ML_TESTS = Booleans.parseBoolean(System.getProperty("tests.ml.skip", "false")); protected static boolean isOriginalCluster(String clusterVersion) { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java index 4b39f71dea1a9..d31130e970f03 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SecurityIndexRolesMetadataMigrationIT.java @@ -6,54 +6,70 @@ */ package org.elasticsearch.upgrades; +import org.elasticsearch.client.Node; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import java.io.IOException; +import 
java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.core.security.action.UpdateIndexMigrationVersionAction.MIGRATION_VERSION_CUSTOM_KEY; +import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; public class SecurityIndexRolesMetadataMigrationIT extends AbstractUpgradeTestCase { - public void testMetadataMigratedAfterUpgrade() throws Exception { - String testRole = "test-role"; - String metaKey = "test_key"; - String metaValue = "test_value"; - - Map testMetadata = Map.of(metaKey, metaValue); + public void testRoleMigration() throws Exception { + String oldTestRole = "old-test-role"; + String mixed1TestRole = "mixed1-test-role"; + String mixed2TestRole = "mixed2-test-role"; + String upgradedTestRole = "upgraded-test-role"; if (CLUSTER_TYPE == ClusterType.OLD) { - createRole(testRole, testMetadata); - assertEntityInSecurityIndex(testRole); - } - if (CLUSTER_TYPE == ClusterType.UPGRADED) { - refreshSecurityIndex(); - waitForMigrationCompletion(); - assertEntityInSecurityIndex(testRole, metaKey, metaValue); - } - } - - public void testMetadataWrittenAfterUpgradeWithoutMigration() throws IOException { - String testRole = "another-test-role"; - String metaKey = "another-test_key"; - String metaValue = "another-test_value"; - - Map testMetadata = Map.of(metaKey, metaValue); - - if (CLUSTER_TYPE == ClusterType.UPGRADED) { - createRole(testRole, testMetadata); - assertEntityInSecurityIndex(testRole, metaKey, metaValue); + createRoleWithMetadata(oldTestRole, Map.of("meta", "test")); + assertDocInSecurityIndex(oldTestRole); + if (canRolesBeMigrated() == false) { + assertNoMigration(adminClient()); + assertCannotQueryRolesByMetadata(client()); + } + } else if (CLUSTER_TYPE == ClusterType.MIXED) { + if (FIRST_MIXED_ROUND) { + createRoleWithMetadata(mixed1TestRole, Map.of("meta", "test")); + assertDocInSecurityIndex(mixed1TestRole); + } else { + createRoleWithMetadata(mixed2TestRole, Map.of("meta", "test")); + assertDocInSecurityIndex(mixed2TestRole); + } + if (canRolesBeMigrated() == false) { + assertNoMigration(adminClient()); + assertCannotQueryRolesByMetadata(client()); + } + } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { + createRoleWithMetadata(upgradedTestRole, Map.of("meta", "test")); + assertTrue(canRolesBeMigrated()); + waitForMigrationCompletion(adminClient()); + assertMigratedDocInSecurityIndex(oldTestRole, "meta", "test"); + assertMigratedDocInSecurityIndex(mixed1TestRole, "meta", "test"); + assertMigratedDocInSecurityIndex(mixed2TestRole, "meta", "test"); + assertMigratedDocInSecurityIndex(upgradedTestRole, "meta", "test"); + // queries all roles by metadata + assertAllRoles(client(), "mixed1-test-role", "mixed2-test-role", "old-test-role", "upgraded-test-role"); } } @SuppressWarnings("unchecked") - private void assertEntityInSecurityIndex(String roleName, String metaKey, String metaValue) throws IOException { + private void assertMigratedDocInSecurityIndex(String roleName, String metaKey, String metaValue) throws IOException { final Request request = new Request("POST", "/.security/_search"); RequestOptions.Builder options = request.getOptions().toBuilder(); request.setJsonEntity( @@ -79,7 +95,7 @@ private void 
assertEntityInSecurityIndex(String roleName, String metaKey, String } @SuppressWarnings("unchecked") - private void assertEntityInSecurityIndex(String id) throws IOException { + private void assertDocInSecurityIndex(String id) throws IOException { final Request request = new Request("POST", "/.security/_search"); RequestOptions.Builder options = request.getOptions().toBuilder(); request.setJsonEntity(String.format(Locale.ROOT, """ @@ -108,21 +124,36 @@ private void addExpectWarningOption(RequestOptions.Builder options) { } @SuppressWarnings("unchecked") - private void waitForMigrationCompletion() throws Exception { - final Request request = new Request("GET", "_cluster/state/metadata/.security-7"); + private static void assertNoMigration(RestClient adminClient) throws Exception { + Request request = new Request("GET", "_cluster/state/metadata/" + INTERNAL_SECURITY_MAIN_INDEX_7); + Response response = adminClient.performRequest(request); + assertOK(response); + Map responseMap = responseAsMap(response); + Map indicesMetadataMap = (Map) ((Map) responseMap.get("metadata")).get("indices"); + assertTrue(indicesMetadataMap.containsKey(INTERNAL_SECURITY_MAIN_INDEX_7)); + assertFalse( + ((Map) indicesMetadataMap.get(INTERNAL_SECURITY_MAIN_INDEX_7)).containsKey(MIGRATION_VERSION_CUSTOM_KEY) + ); + } + + @SuppressWarnings("unchecked") + private static void waitForMigrationCompletion(RestClient adminClient) throws Exception { + final Request request = new Request("GET", "_cluster/state/metadata/" + INTERNAL_SECURITY_MAIN_INDEX_7); assertBusy(() -> { - Response response = adminClient().performRequest(request); + Response response = adminClient.performRequest(request); assertOK(response); Map responseMap = responseAsMap(response); + Map indicesMetadataMap = (Map) ((Map) responseMap.get("metadata")).get( + "indices" + ); + assertTrue(indicesMetadataMap.containsKey(INTERNAL_SECURITY_MAIN_INDEX_7)); assertTrue( - ((Map) ((Map) ((Map) responseMap.get("metadata")).get("indices")).get( - ".security-7" - )).containsKey("migration_version") + ((Map) indicesMetadataMap.get(INTERNAL_SECURITY_MAIN_INDEX_7)).containsKey(MIGRATION_VERSION_CUSTOM_KEY) ); }); } - private void createRole(String roleName, Map metadata) throws IOException { + private void createRoleWithMetadata(String roleName, Map metadata) throws IOException { final Request request = new Request("POST", "/_security/role/" + roleName); BytesReference source = BytesReference.bytes( jsonBuilder().map( @@ -135,15 +166,56 @@ private void createRole(String roleName, Map metadata) throws IO ) ); request.setJsonEntity(source.utf8ToString()); - assertOK(adminClient().performRequest(request)); - refreshSecurityIndex(); + assertOK(client().performRequest(request)); } - private void refreshSecurityIndex() throws IOException { - Request request = new Request("POST", "/.security-7/_refresh"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - addExpectWarningOption(options); - request.setOptions(options); - assertOK(adminClient().performRequest(request)); + private void assertCannotQueryRolesByMetadata(RestClient client) { + List originalNodes = client.getNodes(); + try { + // try the query on every node (upgraded or not) + for (Node node : originalNodes) { + client.setNodes(List.of(node)); + String metadataQuery = """ + {"query":{"exists":{"field":"metadata.test"}}}"""; + Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role"); + request.setJsonEntity(metadataQuery); + ResponseException e = 
expectThrows(ResponseException.class, () -> client.performRequest(request)); + if (e.getResponse().getStatusLine().getStatusCode() == 400) { + // this is an old node that doesn't know about the API + // note that 7.17 shows error messages different from "no handler found for uri" + } else if (e.getResponse().getStatusLine().getStatusCode() == 503) { + // this is an upgraded node, but migration does not work + assertThat(e.getMessage(), containsString("Cannot query or sort role metadata until automatic migration completed")); + } else { + fail(e, "Unexpected exception type"); + } + } + } finally { + client.setNodes(originalNodes); + } + } + + private void assertAllRoles(RestClient client, String... roleNames) throws IOException { + // this queries all roles by metadata + String metadataQuery = """ + {"query":{"bool":{"must":[{"exists":{"field":"metadata.meta"}}]}},"sort":["name"]}"""; + Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/role"); + request.setJsonEntity(metadataQuery); + Response response = client.performRequest(request); + assertOK(response); + Map responseMap = responseAsMap(response); + assertThat(responseMap.get("total"), is(roleNames.length)); + assertThat(responseMap.get("count"), is(roleNames.length)); + @SuppressWarnings("unchecked") + List> roles = new ArrayList<>((List>) responseMap.get("roles")); + assertThat(roles.size(), is(responseMap.get("count"))); + for (int i = 0; i < roleNames.length; i++) { + assertThat(roles.get(i).get("name"), equalTo(roleNames[i])); + } + } + + private boolean canRolesBeMigrated() { + return clusterHasFeature("security.migration_framework") != false + && clusterHasFeature("security.roles_metadata_flattened") != false; + } } From 5f91b7d8a5533b5966c009ecb91758aa02489ebf Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 2 Jul 2024 18:09:01 +0200 Subject: [PATCH 118/216] Implement lucene pushdown on ST_DISTANCE for Equality (#110348) * Reapply "Support ST_DISTANCE with EQ predicate" This reverts commit 1deb7b76ba799ab73d6851dc82e651f9ae4ce3a0. * Quantize points before calculating distance, and add more tests Curiously, I was not able to reproduce any difference in behaviour between the Lucene (pushdown) and ESQL distance calculations, even when focusing in on the same data, generated to be on the edge of the quantization boundaries and the haversinDistance boundary.
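For illustration, here is a minimal, self-contained sketch of the quantize-then-haversine calculation this change introduces (the wrapper class and the sample run are invented for this example; GeoUtils.quantizeLat/quantizeLon and SloppyMath.haversinMeters are the methods actually used in the StDistance change below):

import org.apache.lucene.util.SloppyMath;
import org.elasticsearch.common.geo.GeoUtils;

public final class QuantizedDistanceSketch {

    // Snap both points to the quantized precision used by the geo_point index
    // encoding, then compute the haversine distance in meters. Quantizing first
    // keeps the ESQL evaluator consistent with the Lucene pushdown for points
    // that sit right on a quantization boundary.
    static double distanceMeters(double lat1, double lon1, double lat2, double lon2) {
        return SloppyMath.haversinMeters(
            GeoUtils.quantizeLat(lat1),
            GeoUtils.quantizeLon(lon1),
            GeoUtils.quantizeLat(lat2),
            GeoUtils.quantizeLon(lon2)
        );
    }

    public static void main(String[] args) {
        // First row of the distances.csv fixture, measured from POINT(0 0).
        // WKT order is (lon lat): lat = 2.514570951461792E-6, lon = 5.867332220077515E-7.
        System.out.println(distanceMeters(2.514570951461792E-6, 5.867332220077515E-7, 0.0, 0.0));
        // should print the fixture's expected distance, ~0.2848062860101461 meters
    }
}

Because both sides now agree on the quantized value, an equality predicate such as WHERE ST_DISTANCE(location, TO_GEOPOINT("POINT(0 0)")) == 0.2848062860101461 matches the same rows whether it is pushed down to Lucene or evaluated row-by-row by ESQL, which is what the distancesNearQuantizationBoundary tests added below assert.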
--- .../xpack/esql/CsvTestsDataLoader.java | 4 +- .../src/main/resources/distances.csv | 118 ++++++++++++++++++ .../src/main/resources/mapping-distances.json | 10 ++ .../src/main/resources/spatial.csv-spec | 80 ++++++++++-- .../function/scalar/spatial/StDistance.java | 9 +- .../optimizer/LocalPhysicalPlanOptimizer.java | 46 +++---- .../optimizer/PhysicalPlanOptimizerTests.java | 48 ++++--- 7 files changed, 264 insertions(+), 51 deletions(-) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/distances.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-distances.json diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index ec5770e8ce70b..530b2bc01b3d6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -95,6 +95,7 @@ public class CsvTestsDataLoader { "mapping-cartesian_multipolygons.json", "cartesian_multipolygons.csv" ); + private static final TestsDataset DISTANCES = new TestsDataset("distances", "mapping-distances.json", "distances.csv"); private static final TestsDataset K8S = new TestsDataset("k8s", "k8s-mappings.json", "k8s.csv", "k8s-settings.json", true); @@ -119,7 +120,8 @@ public class CsvTestsDataLoader { Map.entry(COUNTRIES_BBOX_WEB.indexName, COUNTRIES_BBOX_WEB), Map.entry(AIRPORT_CITY_BOUNDARIES.indexName, AIRPORT_CITY_BOUNDARIES), Map.entry(CARTESIAN_MULTIPOLYGONS.indexName, CARTESIAN_MULTIPOLYGONS), - Map.entry(K8S.indexName, K8S) + Map.entry(K8S.indexName, K8S), + Map.entry(DISTANCES.indexName, DISTANCES) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/distances.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/distances.csv new file mode 100644 index 0000000000000..3c21d41d7452b --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/distances.csv @@ -0,0 +1,118 @@ +distance:double,location:geo_point +0.2848062860101461,POINT (5.867332220077515E-7 2.514570951461792E-6) +0.2848062860101461,POINT (5.867332220077515E-7 2.514570951461792E-6) +0.2848062860101461,POINT (5.867332220077515E-7 2.514570951461792E-6) +0.30021218524180354,POINT (6.705522537231445E-7 2.514570951461792E-6) +0.30021218524180354,POINT (6.705522537231445E-7 2.514570951461792E-6) +0.30021218524180354,POINT (6.705522537231445E-7 2.514570951461792E-6) +0.2848062860101461,POINT (5.867332220077515E-7 2.514570951461792E-6) +0.2848062860101461,POINT (5.867332220077515E-7 2.514570951461792E-6) +0.2848062860101461,POINT (5.867332220077515E-7 2.514570951461792E-6) +0.30021218524180354,POINT (6.705522537231445E-7 2.514570951461792E-6) +0.30021218524180354,POINT (6.705522537231445E-7 2.514570951461792E-6) +0.30021218524180354,POINT (6.705522537231445E-7 2.514570951461792E-6) +0.26851794154977293,POINT (5.867332220077515E-7 2.4726614356040955E-6) +0.26851794154977293,POINT (5.867332220077515E-7 2.4726614356040955E-6) +0.26851794154977293,POINT (5.867332220077515E-7 2.4726614356040955E-6) +0.2848062860101461,POINT (6.705522537231445E-7 2.4726614356040955E-6) +0.2848062860101461,POINT (6.705522537231445E-7 2.4726614356040955E-6) +0.2848062860101461,POINT 
(6.705522537231445E-7 2.4726614356040955E-6) +0.2848062860101461,POINT (1.0058283805847168E-6 2.346932888031006E-6) +0.2848062860101461,POINT (1.0058283805847168E-6 2.346932888031006E-6) +0.2848062860101461,POINT (1.0058283805847168E-6 2.346932888031006E-6) +0.30021218524180354,POINT (1.0896474123001099E-6 2.346932888031006E-6) +0.30021218524180354,POINT (1.0896474123001099E-6 2.346932888031006E-6) +0.30021218524180354,POINT (1.0896474123001099E-6 2.346932888031006E-6) +0.2848062860101461,POINT (1.0058283805847168E-6 2.346932888031006E-6) +0.2848062860101461,POINT (1.0058283805847168E-6 2.346932888031006E-6) +0.2848062860101461,POINT (1.0058283805847168E-6 2.346932888031006E-6) +0.30021218524180354,POINT (1.0896474123001099E-6 2.346932888031006E-6) +0.30021218524180354,POINT (1.0896474123001099E-6 2.346932888031006E-6) +0.30021218524180354,POINT (1.0896474123001099E-6 2.346932888031006E-6) +0.2685179415497728,POINT (1.0058283805847168E-6 2.3050233721733093E-6) +0.2685179415497728,POINT (1.0058283805847168E-6 2.3050233721733093E-6) +0.2685179415497728,POINT (1.0058283805847168E-6 2.3050233721733093E-6) +0.2848062860101459,POINT (1.0896474123001099E-6 2.3050233721733093E-6) +0.2848062860101459,POINT (1.0896474123001099E-6 2.3050233721733093E-6) +0.2848062860101459,POINT (1.0896474123001099E-6 2.3050233721733093E-6) +0.2848062860101459,POINT (1.341104507446289E-6 2.1792948246002197E-6) +0.2848062860101459,POINT (1.341104507446289E-6 2.1792948246002197E-6) +0.2848062860101459,POINT (1.341104507446289E-6 2.1792948246002197E-6) +0.30021218524180354,POINT (1.4249235391616821E-6 2.1792948246002197E-6) +0.30021218524180354,POINT (1.4249235391616821E-6 2.1792948246002197E-6) +0.30021218524180354,POINT (1.4249235391616821E-6 2.1792948246002197E-6) +0.2848062860101459,POINT (1.341104507446289E-6 2.1792948246002197E-6) +0.2848062860101459,POINT (1.341104507446289E-6 2.1792948246002197E-6) +0.2848062860101459,POINT (1.341104507446289E-6 2.1792948246002197E-6) +0.30021218524180354,POINT (1.4249235391616821E-6 2.1792948246002197E-6) +0.30021218524180354,POINT (1.4249235391616821E-6 2.1792948246002197E-6) +0.30021218524180354,POINT (1.4249235391616821E-6 2.1792948246002197E-6) +0.2685179415497728,POINT (1.341104507446289E-6 2.137385308742523E-6) +0.2685179415497728,POINT (1.341104507446289E-6 2.137385308742523E-6) +0.2685179415497728,POINT (1.341104507446289E-6 2.137385308742523E-6) +0.2848062860101459,POINT (1.4249235391616821E-6 2.137385308742523E-6) +0.2848062860101459,POINT (1.4249235391616821E-6 2.137385308742523E-6) +0.2848062860101459,POINT (1.4249235391616821E-6 2.137385308742523E-6) +0.2848062860101459,POINT (1.5925616025924683E-6 2.0116567611694336E-6) +0.2848062860101459,POINT (1.5925616025924683E-6 2.0116567611694336E-6) +0.2848062860101459,POINT (1.5925616025924683E-6 2.0116567611694336E-6) +0.30021218524180354,POINT (1.6763806343078613E-6 2.0116567611694336E-6) +0.30021218524180354,POINT (1.6763806343078613E-6 2.0116567611694336E-6) +0.30021218524180354,POINT (1.6763806343078613E-6 2.0116567611694336E-6) +0.2848062860101459,POINT (1.5925616025924683E-6 2.0116567611694336E-6) +0.2848062860101459,POINT (1.5925616025924683E-6 2.0116567611694336E-6) +0.2848062860101459,POINT (1.5925616025924683E-6 2.0116567611694336E-6) +0.30021218524180354,POINT (1.6763806343078613E-6 2.0116567611694336E-6) +0.30021218524180354,POINT (1.6763806343078613E-6 2.0116567611694336E-6) +0.30021218524180354,POINT (1.6763806343078613E-6 2.0116567611694336E-6) +0.2685179415497728,POINT (1.5925616025924683E-6 
1.969747245311737E-6) +0.2685179415497728,POINT (1.5925616025924683E-6 1.969747245311737E-6) +0.2685179415497728,POINT (1.5925616025924683E-6 1.969747245311737E-6) +0.2848062860101459,POINT (1.6763806343078613E-6 1.969747245311737E-6) +0.2848062860101459,POINT (1.6763806343078613E-6 1.969747245311737E-6) +0.2848062860101459,POINT (1.6763806343078613E-6 1.969747245311737E-6) +0.2848062860101459,POINT (1.7601996660232544E-6 1.8440186977386475E-6) +0.2848062860101459,POINT (1.7601996660232544E-6 1.8440186977386475E-6) +0.2848062860101459,POINT (1.7601996660232544E-6 1.8440186977386475E-6) +0.30021218524180354,POINT (1.8440186977386475E-6 1.8440186977386475E-6) +0.30021218524180354,POINT (1.8440186977386475E-6 1.8440186977386475E-6) +0.30021218524180354,POINT (1.8440186977386475E-6 1.8440186977386475E-6) +0.2848062860101459,POINT (1.7601996660232544E-6 1.8440186977386475E-6) +0.2848062860101459,POINT (1.7601996660232544E-6 1.8440186977386475E-6) +0.2848062860101459,POINT (1.7601996660232544E-6 1.8440186977386475E-6) +0.30021218524180354,POINT (1.8440186977386475E-6 1.8440186977386475E-6) +0.30021218524180354,POINT (1.8440186977386475E-6 1.8440186977386475E-6) +0.30021218524180354,POINT (1.8440186977386475E-6 1.8440186977386475E-6) +0.2685179415497728,POINT (1.7601996660232544E-6 1.802109181880951E-6) +0.2685179415497728,POINT (1.7601996660232544E-6 1.802109181880951E-6) +0.2685179415497728,POINT (1.7601996660232544E-6 1.802109181880951E-6) +0.2848062860101459,POINT (1.8440186977386475E-6 1.802109181880951E-6) +0.2848062860101459,POINT (1.8440186977386475E-6 1.802109181880951E-6) +0.2848062860101459,POINT (1.8440186977386475E-6 1.802109181880951E-6) +3.2597569375901188,POINT (2.3720785975456238E-5 -1.7224811017513275E-5) +3.2597569375901188,POINT (2.3720785975456238E-5 -1.7224811017513275E-5) +3.2597569375901188,POINT (2.3720785975456238E-5 -1.7224811017513275E-5) +3.2625206150153967,POINT (2.3720785975456238E-5 -1.726672053337097E-5) +3.2625206150153967,POINT (2.3720785975456238E-5 -1.726672053337097E-5) +3.2625206150153967,POINT (2.3720785975456238E-5 -1.726672053337097E-5) +3.2625206150153967,POINT (2.3720785975456238E-5 -1.726672053337097E-5) +3.2625206150153967,POINT (2.3720785975456238E-5 -1.726672053337097E-5) +3.2625206150153967,POINT (2.3720785975456238E-5 -1.726672053337097E-5) +3.2597569375901188,POINT (2.346932888031006E-5 -1.7560087144374847E-5) +3.2597569375901188,POINT (2.346932888031006E-5 -1.7560087144374847E-5) +3.2597569375901188,POINT (2.346932888031006E-5 -1.7560087144374847E-5) +3.2625206150153967,POINT (2.346932888031006E-5 -1.7601996660232544E-5) +3.2625206150153967,POINT (2.346932888031006E-5 -1.7601996660232544E-5) +3.2625206150153967,POINT (2.346932888031006E-5 -1.7601996660232544E-5) +3.2625206150153967,POINT (2.346932888031006E-5 -1.7601996660232544E-5) +3.2625206150153967,POINT (2.346932888031006E-5 -1.7601996660232544E-5) +3.2625206150153967,POINT (2.346932888031006E-5 -1.7601996660232544E-5) +3.258374219844941,POINT (2.2547319531440735E-5 -1.873355358839035E-5) +3.258374219844941,POINT (2.2547319531440735E-5 -1.873355358839035E-5) +3.258374219844941,POINT (2.2547319531440735E-5 -1.873355358839035E-5) +3.262520615015394,POINT (2.2547319531440735E-5 -1.8775463104248047E-5) +3.262520615015394,POINT (2.2547319531440735E-5 -1.8775463104248047E-5) +3.262520615015394,POINT (2.2547319531440735E-5 -1.8775463104248047E-5) +3.262520615015394,POINT (2.2547319531440735E-5 -1.8775463104248047E-5) +3.262520615015394,POINT (2.2547319531440735E-5 -1.8775463104248047E-5) 
+3.262520615015394,POINT (2.2547319531440735E-5 -1.8775463104248047E-5) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-distances.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-distances.json new file mode 100644 index 0000000000000..8e29f9e71f129 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-distances.json @@ -0,0 +1,10 @@ +{ + "properties": { + "distance": { + "type": "double" + }, + "location": { + "type": "geo_point" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index aeb95da1ddaa0..018a22db1337a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -883,10 +883,10 @@ ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] ; wkt:keyword | pt:geo_point | distance:double -"POINT(1 1)" | POINT(1 1) | 157249.59776811762 -"POINT(-1 -1)" | POINT(-1 -1) | 157249.59776811762 -"POINT(-1 1)" | POINT(-1 1) | 157249.59776811762 -"POINT(1 -1)" | POINT(1 -1) | 157249.59776811762 +"POINT(1 1)" | POINT(1 1) | 157249.5916907891 +"POINT(-1 -1)" | POINT(-1 -1) | 157249.6015756357 +"POINT(-1 1)" | POINT(-1 1) | 157249.5982806869 +"POINT(1 -1)" | POINT(1 -1) | 157249.59498573805 ; airportCityLocationPointDistance @@ -898,7 +898,7 @@ FROM airports ; distance:double | count:long -15869.987675939537 | 891 +15869.9876282387 | 891 ; airportDistanceToCityCopenhagen @@ -914,7 +914,7 @@ FROM airports // tag::st_distance-airports-result[] abbrev:k | name:text | location:geo_point | city_location:geo_point | distance:d -CPH | Copenhagen | POINT(12.6493508684508 55.6285017221528) | POINT(12.5683 55.6761) | 7339.57266575626 +CPH | Copenhagen | POINT(12.6493508684508 55.6285017221528) | POINT(12.5683 55.6761) | 7339.573896618216 // end::st_distance-airports-result[] ; @@ -1054,11 +1054,69 @@ FROM airports ; abbrev:k | name:text | location:geo_point | country:k | city:k | city_location:geo_point | distance:d -TRD | Trondheim Vaernes | POINT(10.9168095241445 63.472029381717) | Norway | Stjørdalshalsen | POINT(10.9189 63.4712) | 138.86728011324072 -DHA | King Abdulaziz AB | POINT(50.1477245727844 26.2703680854768) | Saudi Arabia | Dhahran | POINT(50.15 26.2667) | 466.7314410344158 -NDB | Nouadhibou Int'l | POINT(-17.0334398691538 20.9290523064387) | Mauritania | Nouadhibou | POINT(-17.0333 20.9333) | 472.545954400989 -ESE | Ensenada | POINT(-116.595724400418 31.7977139760569) | Mexico | Rodolfo Sánchez Taboada | POINT(-116.5911 31.7958) | 486.1044856437723 -INU | Nauru Int'l | POINT(166.91613965882 -0.545037226856384) | Nauru | Yaren | POINT(166.9209 -0.5477) | 606.4888777331985 +TRD | Trondheim Vaernes | POINT(10.9168095241445 63.472029381717) | Norway | Stjørdalshalsen | POINT(10.9189 63.4712) | 138.86985803478004 +DHA | King Abdulaziz AB | POINT(50.1477245727844 26.2703680854768) | Saudi Arabia | Dhahran | POINT(50.15 26.2667) | 466.7321285739462 +NDB | Nouadhibou Int'l | POINT(-17.0334398691538 20.9290523064387) | Mauritania | Nouadhibou | POINT(-17.0333 20.9333) | 472.54642026512636 +ESE | Ensenada | POINT(-116.595724400418 31.7977139760569) | Mexico | Rodolfo Sánchez Taboada | POINT(-116.5911 31.7958) | 486.1022373716486 +INU | Nauru Int'l | POINT(166.91613965882 -0.545037226856384) | Nauru | Yaren | POINT(166.9209 -0.5477) | 606.4899254580574 +; + +distancesNearQuantizationBoundary 
+required_capability: st_distance + +FROM distances +| EVAL d = ST_DISTANCE(location, TO_GEOPOINT("POINT(0 0)")) +| EVAL delta = ABS(distance - d) +| WHERE delta > 0 +| KEEP distance, d, delta, location +; + +distance:double | d:double | delta:double | location:geo_point +; + +distancesNearQuantizationBoundaryStats +required_capability: st_distance + +FROM distances +| EVAL d = ST_DISTANCE(location, TO_GEOPOINT("POINT(0 0)")) +| STATS count=COUNT(*) BY d +| SORT d ASC +; + +count:long | d:double +12 | 0.2685179415497728 +3 | 0.26851794154977293 +30 | 0.2848062860101459 +15 | 0.2848062860101461 +30 | 0.30021218524180354 +3 | 3.258374219844941 +6 | 3.2597569375901188 +6 | 3.262520615015394 +12 | 3.2625206150153967 +; + +distancesNearQuantizationBoundaryFilterStatsA +required_capability: st_distance + +FROM distances +| WHERE ST_DISTANCE(location, TO_GEOPOINT("POINT(0 0)")) == 0.2848062860101461 +| STATS count=COUNT(*) +; + +count:long +15 +; + +distancesNearQuantizationBoundaryFilterStatsB +required_capability: st_distance + +FROM distances +| WHERE ST_DISTANCE(location, TO_GEOPOINT("POINT(0 0)")) == 3.2625206150153967 +| STATS count=COUNT(*) +; + +count:long +12 ; ############################################### diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java index 2e20fba74476b..1fdd4241aa222 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StDistance.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.SloppyMath; +import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.compute.ann.Evaluator; @@ -60,8 +61,12 @@ protected GeoDistanceCalculator() { @Override protected double distance(Point left, Point right) { - // TODO: investigate if we need to use the more complex validation in Lucenes Circle2D::HaversinDistance class - return SloppyMath.haversinMeters(left.getY(), left.getX(), right.getY(), right.getX()); + return SloppyMath.haversinMeters( + GeoUtils.quantizeLat(left.getY()), + GeoUtils.quantizeLon(left.getX()), + GeoUtils.quantizeLat(right.getY()), + GeoUtils.quantizeLon(right.getX()) + ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 21c8ddb62bf86..d88b46cbbc530 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -625,15 +625,12 @@ protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext } private boolean rewriteComparison(List rewritten, StDistance dist, Expression literal, ComparisonType comparisonType) { - // Currently we do not support Equals - if (comparisonType.lt || comparisonType.gt) { - Object value = literal.fold(); - if (value instanceof Number number) { - if (dist.right().foldable()) { - return rewriteDistanceFilter(rewritten, dist.source(), dist.left(), 
dist.right(), number, comparisonType); - } else if (dist.left().foldable()) { - return rewriteDistanceFilter(rewritten, dist.source(), dist.right(), dist.left(), number, comparisonType); - } + Object value = literal.fold(); + if (value instanceof Number number) { + if (dist.right().foldable()) { + return rewriteDistanceFilter(rewritten, dist.source(), dist.left(), dist.right(), number, comparisonType); + } else if (dist.left().foldable()) { + return rewriteDistanceFilter(rewritten, dist.source(), dist.right(), dist.left(), number, comparisonType); } } return false; @@ -642,30 +639,35 @@ private boolean rewriteComparison(List rewritten, StDistance dist, E private boolean rewriteDistanceFilter( List rewritten, Source source, - Expression spatialExpression, - Expression literalExpression, + Expression spatialExp, + Expression literalExp, Number number, ComparisonType comparisonType ) { - Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(literalExpression); + Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(literalExp); if (geometry instanceof Point point) { double distance = number.doubleValue(); - if (comparisonType.eq == false) { - distance = comparisonType.lt ? Math.nextDown(distance) : Math.nextUp(distance); + if (comparisonType.lt) { + distance = comparisonType.eq ? distance : Math.nextDown(distance); + rewritten.add(new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp))); + } else if (comparisonType.gt) { + distance = comparisonType.eq ? distance : Math.nextUp(distance); + rewritten.add(new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, distance, literalExp))); + } else if (comparisonType.eq) { + rewritten.add(new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp))); + rewritten.add(new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, Math.nextDown(distance), literalExp))); } - var circle = new Circle(point.getX(), point.getY(), distance); - var wkb = WellKnownBinary.toWKB(circle, ByteOrder.LITTLE_ENDIAN); - var cExp = new Literal(literalExpression.source(), new BytesRef(wkb), DataType.GEO_SHAPE); - rewritten.add( - comparisonType.lt - ? new SpatialIntersects(source, spatialExpression, cExp) - : new SpatialDisjoint(source, spatialExpression, cExp) - ); return true; } return false; } + private Literal makeCircleLiteral(Point point, double distance, Expression literalExpression) { + var circle = new Circle(point.getX(), point.getY(), distance); + var wkb = WellKnownBinary.toWKB(circle, ByteOrder.LITTLE_ENDIAN); + return new Literal(literalExpression.source(), new BytesRef(wkb), DataType.GEO_SHAPE); + } + /** * This enum captures the key differences between various inequalities as perceived from the spatial distance function. 
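 * For example, with the equality support added above, {@code ST_DISTANCE(location, point) == d} is rewritten as the
 * conjunction of INTERSECTS on a circle of radius d and DISJOINT on a circle of radius Math.nextDown(d), which
 * together select exactly the points whose computed distance equals d.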
* In particular, we need to know which direction the inequality points, with lt=true meaning the left is expected to be smaller diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 51cc2483d73b7..dfd66657c653e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -3486,7 +3486,7 @@ public void testPushSpatialDistanceToSource() { "ST_DISTANCE(TO_GEOPOINT(\"POINT(12.565 55.673)\"), location)" }) { for (boolean reverse : new Boolean[] { false, true }) { - for (String op : new String[] { "<", "<=", ">", ">=" }) { + for (String op : new String[] { "<", "<=", ">", ">=", "==" }) { var expected = ExpectedComparison.from(op, reverse, 600000.0); var predicate = reverse ? "600000 " + op + " " + distanceFunction : distanceFunction + " " + op + " 600000"; var query = "FROM airports | WHERE " + predicate + " AND scalerank > 1"; @@ -3511,19 +3511,30 @@ public void testPushSpatialDistanceToSource() { var rangeQueryBuilders = bool.filter().stream().filter(p -> p instanceof SingleValueQuery.Builder).toList(); assertThat("Expected one range query builder", rangeQueryBuilders.size(), equalTo(1)); assertThat(((SingleValueQuery.Builder) rangeQueryBuilders.get(0)).field(), equalTo("scalerank")); - var shapeQueryBuilders = bool.filter() - .stream() - .filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder) - .toList(); - assertThat("Expected one shape query builder", shapeQueryBuilders.size(), equalTo(1)); - var condition = as(shapeQueryBuilders.get(0), SpatialRelatesQuery.ShapeQueryBuilder.class); - assertThat("Geometry field name", condition.fieldName(), equalTo("location")); - assertThat("Spatial relationship", condition.relation(), equalTo(expected.shapeRelation())); - assertThat("Geometry is Circle", condition.shape().type(), equalTo(ShapeType.CIRCLE)); - var circle = as(condition.shape(), Circle.class); - assertThat("Circle center-x", circle.getX(), equalTo(12.565)); - assertThat("Circle center-y", circle.getY(), equalTo(55.673)); - assertThat("Circle radius for predicate " + predicate, circle.getRadiusMeters(), equalTo(expected.value)); + if (op.equals("==")) { + var boolQueryBuilders = bool.filter().stream().filter(p -> p instanceof BoolQueryBuilder).toList(); + assertThat("Expected one sub-bool query builder", boolQueryBuilders.size(), equalTo(1)); + var bool2 = as(boolQueryBuilders.get(0), BoolQueryBuilder.class); + var shapeQueryBuilders = bool2.must() + .stream() + .filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder) + .toList(); + assertShapeQueryRange(shapeQueryBuilders, Math.nextDown(expected.value), expected.value); + } else { + var shapeQueryBuilders = bool.filter() + .stream() + .filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder) + .toList(); + assertThat("Expected one shape query builder", shapeQueryBuilders.size(), equalTo(1)); + var condition = as(shapeQueryBuilders.get(0), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(expected.shapeRelation())); + assertThat("Geometry is Circle", condition.shape().type(), equalTo(ShapeType.CIRCLE)); + var circle = as(condition.shape(), 
Circle.class); + assertThat("Circle center-x", circle.getX(), equalTo(12.565)); + assertThat("Circle center-y", circle.getY(), equalTo(55.673)); + assertThat("Circle radius for predicate " + predicate, circle.getRadiusMeters(), equalTo(expected.value)); + } } } } @@ -3559,11 +3570,15 @@ AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000 var rangeQueryBuilders = bool.filter().stream().filter(p -> p instanceof SingleValueQuery.Builder).toList(); assertThat("Expected zero range query builder", rangeQueryBuilders.size(), equalTo(0)); var shapeQueryBuilders = bool.must().stream().filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder).toList(); + assertShapeQueryRange(shapeQueryBuilders, 400000.0, 600000.0); + } + + private void assertShapeQueryRange(List shapeQueryBuilders, double min, double max) { assertThat("Expected two shape query builders", shapeQueryBuilders.size(), equalTo(2)); var relationStats = new HashMap(); for (var builder : shapeQueryBuilders) { var condition = as(builder, SpatialRelatesQuery.ShapeQueryBuilder.class); - var expected = condition.relation() == ShapeRelation.INTERSECTS ? 600000.0 : 400000.0; + var expected = condition.relation() == ShapeRelation.INTERSECTS ? max : min; relationStats.compute(condition.relation(), (r, c) -> c == null ? 1 : c + 1); assertThat("Geometry field name", condition.fieldName(), equalTo("location")); assertThat("Geometry is Circle", condition.shape().type(), equalTo(ShapeType.CIRCLE)); @@ -3572,6 +3587,9 @@ AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000 assertThat("Circle center-y", circle.getY(), equalTo(55.673)); assertThat("Circle radius for shape relation " + condition.relation(), circle.getRadiusMeters(), equalTo(expected)); } + assertThat("Expected one INTERSECTS and one DISJOINT", relationStats.size(), equalTo(2)); + assertThat("Expected one INTERSECTS", relationStats.get(ShapeRelation.INTERSECTS), equalTo(1)); + assertThat("Expected one DISJOINT", relationStats.get(ShapeRelation.DISJOINT), equalTo(1)); } private record ExpectedComparison(Class comp, double value) { From 1bbe74f5fbc615488d763f1cb3fa75884f977956 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 2 Jul 2024 12:29:15 -0400 Subject: [PATCH 119/216] Add auth and better tests to the ingest geoip http client (#110381) --- modules/ingest-geoip/build.gradle | 9 + .../ingest/geoip/HttpClient.java | 80 +++++-- .../ingest/geoip/HttpClientTests.java | 208 ++++++++++++++++++ 3 files changed, 278 insertions(+), 19 deletions(-) create mode 100644 modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index f755a27b478cc..5bdb6da5c7b29 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -64,6 +64,15 @@ tasks.named("internalClusterTest") { onlyIf("OS != windows") { OS.current() != OS.WINDOWS } } +tasks.named('forbiddenApisTest').configure { + //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage + modifyBundledSignatures { bundledSignatures -> + bundledSignatures -= 'jdk-non-portable' + bundledSignatures += 'jdk-internal' + bundledSignatures + } +} + tasks.named("forbiddenPatterns").configure { exclude '**/*.mmdb' } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/HttpClient.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/HttpClient.java index 188d826b05ff5..8efc4dc2e74bd 100644 
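
A note on the redirect handling in the HttpClient change below: the client records the authority of the original URL
(the host plus the port when one is given explicitly) and, on each redirect, re-sends the credentials only when the
next hop has that same authority. A minimal, self-contained sketch of just this comparison rule; the class and method
names here are illustrative and are not part of the patch:

import java.net.MalformedURLException;
import java.net.URL;

// Illustrative sketch of the same-authority rule applied when following redirects.
class SameAuthorityRule {
    static boolean mayForwardAuth(String originalUrl, String redirectUrl) throws MalformedURLException {
        // URL#getAuthority() returns the host plus any explicit port, e.g. "example.com:8443",
        // so a different host, or the same host on a different explicit port, drops the credentials.
        return new URL(originalUrl).getAuthority().equals(new URL(redirectUrl).getAuthority());
    }
}

For example, mayForwardAuth("https://example.com/a", "https://example.com/b") is true, while a redirect to
"https://cdn.example.net/b" is not, so that hop is requested without the auth details.
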
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/HttpClient.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/HttpClient.java @@ -15,14 +15,16 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.rest.RestStatus; -import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; +import java.net.Authenticator; import java.net.HttpURLConnection; +import java.net.PasswordAuthentication; import java.net.URL; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; +import java.util.Objects; import static java.net.HttpURLConnection.HTTP_MOVED_PERM; import static java.net.HttpURLConnection.HTTP_MOVED_TEMP; @@ -32,49 +34,89 @@ class HttpClient { - byte[] getBytes(String url) throws IOException { - return get(url).readAllBytes(); + // a private sentinel value for representing the idea that there's no auth for some request. + // this allows us to have a not-null requirement on the methods that do accept an auth. + // if you don't want auth, then don't use those methods. ;) + private static final PasswordAuthentication NO_AUTH = new PasswordAuthentication("no_auth", "no_auth_unused".toCharArray()); + + PasswordAuthentication auth(final String username, final String password) { + return new PasswordAuthentication(username, password.toCharArray()); + } + + byte[] getBytes(final String url) throws IOException { + return getBytes(NO_AUTH, url); + } + + byte[] getBytes(final PasswordAuthentication auth, final String url) throws IOException { + return get(auth, url).readAllBytes(); + } + + InputStream get(final String url) throws IOException { + return get(NO_AUTH, url); } - InputStream get(String urlToGet) throws IOException { + InputStream get(final PasswordAuthentication auth, final String url) throws IOException { + Objects.requireNonNull(auth); + Objects.requireNonNull(url); + + final String originalAuthority = new URL(url).getAuthority(); + return doPrivileged(() -> { - String url = urlToGet; - HttpURLConnection conn = createConnection(url); + String innerUrl = url; + HttpURLConnection conn = createConnection(auth, innerUrl); int redirectsCount = 0; while (true) { switch (conn.getResponseCode()) { case HTTP_OK: - return new BufferedInputStream(getInputStream(conn)); + return getInputStream(conn); case HTTP_MOVED_PERM: case HTTP_MOVED_TEMP: case HTTP_SEE_OTHER: if (redirectsCount++ > 50) { - throw new IllegalStateException("too many redirects connection to [" + urlToGet + "]"); + throw new IllegalStateException("too many redirects connection to [" + url + "]"); + } + + // deal with redirections (including relative urls) + final String location = conn.getHeaderField("Location"); + final URL base = new URL(innerUrl); + final URL next = new URL(base, location); + innerUrl = next.toExternalForm(); + + // compare the *original* authority and the next authority to determine whether to include auth details. + // this means that the host and port (if it is provided explicitly) are considered. it also means that if we + // were to ping-pong back to the original authority, then we'd start including the auth details again. 
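+                        // (for example, a chain original.example.com -> mirror.example.net -> original.example.com
+                        // sends the credentials on the first and third requests, but not on the hop to the mirror)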
+ final String nextAuthority = next.getAuthority(); + if (originalAuthority.equals(nextAuthority)) { + conn = createConnection(auth, innerUrl); + } else { + conn = createConnection(NO_AUTH, innerUrl); } - String location = conn.getHeaderField("Location"); - URL base = new URL(url); - URL next = new URL(base, location); // Deal with relative URLs - url = next.toExternalForm(); - conn = createConnection(url); break; case HTTP_NOT_FOUND: - throw new ResourceNotFoundException("{} not found", urlToGet); + throw new ResourceNotFoundException("{} not found", url); default: int responseCode = conn.getResponseCode(); - throw new ElasticsearchStatusException("error during downloading {}", RestStatus.fromCode(responseCode), urlToGet); + throw new ElasticsearchStatusException("error during downloading {}", RestStatus.fromCode(responseCode), url); } } }); } @SuppressForbidden(reason = "we need socket connection to download data from internet") - private static InputStream getInputStream(HttpURLConnection conn) throws IOException { + private static InputStream getInputStream(final HttpURLConnection conn) throws IOException { return conn.getInputStream(); } - private static HttpURLConnection createConnection(String url) throws IOException { - HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection(); + private static HttpURLConnection createConnection(final PasswordAuthentication auth, final String url) throws IOException { + final HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection(); + if (auth != NO_AUTH) { + conn.setAuthenticator(new Authenticator() { + protected PasswordAuthentication getPasswordAuthentication() { + return auth; + } + }); + } conn.setConnectTimeout(10000); conn.setReadTimeout(10000); conn.setDoOutput(false); @@ -82,7 +124,7 @@ private static HttpURLConnection createConnection(String url) throws IOException return conn; } - private static R doPrivileged(CheckedSupplier supplier) throws IOException { + private static R doPrivileged(final CheckedSupplier supplier) throws IOException { SpecialPermission.check(); try { return AccessController.doPrivileged((PrivilegedExceptionAction) supplier::get); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java new file mode 100644 index 0000000000000..a3f4395d74755 --- /dev/null +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/HttpClientTests.java @@ -0,0 +1,208 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.ingest.geoip; + +import com.sun.net.httpserver.BasicAuthenticator; +import com.sun.net.httpserver.HttpServer; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.OutputStream; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.PasswordAuthentication; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.equalTo; + +public class HttpClientTests extends ESTestCase { + + private static HttpServer server; + + @BeforeClass + public static void startServer() throws Throwable { + server = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + server.createContext("/hello/", exchange -> { + try { + String response = "hello world"; + exchange.sendResponseHeaders(200, response.length()); + try (OutputStream os = exchange.getResponseBody()) { + os.write(response.getBytes(StandardCharsets.UTF_8)); + } + } catch (Exception e) { + fail(e); + } + }); + server.createContext("/404/", exchange -> { + try { + exchange.sendResponseHeaders(404, 0); + } catch (Exception e) { + fail(e); + } + }); + server.createContext("/auth/", exchange -> { + try { + String response = "super secret hello world"; + exchange.sendResponseHeaders(200, response.length()); + try (OutputStream os = exchange.getResponseBody()) { + os.write(response.getBytes(StandardCharsets.UTF_8)); + } + } catch (Exception e) { + fail(e); + } + }).setAuthenticator(new BasicAuthenticator("some realm") { + @Override + public boolean checkCredentials(String username, String password) { + return "user".equals(username) && "pass".equals(password); + } + }); + server.createContext("/redirect", exchange -> { + // path is either like this: /redirect/count/destination/ + // or just: /redirect + try { + final String path = exchange.getRequestURI().getPath(); + int count; + String destination; + if (path.lastIndexOf("/") > 0) { + // path is /redirect/count/destination/, so pull out the bits + String[] bits = path.split("/"); + count = Integer.parseInt(bits[2]); + destination = bits[3]; + } else { + // path is just /redirect + count = -1; + destination = "hello"; + } + + if (count == -1) { + // send a relative redirect, i.e. 
just "hello/" + exchange.getResponseHeaders().add("Location", destination + "/"); + } else if (count > 0) { + // decrement the count and send a redirect to either a full url ("http://...") + // or to an absolute url on this same server ("/...") + count--; + String location = "/redirect/" + count + "/" + destination + "/"; + if (count % 2 == 0) { + location = url(location); // do the full url + } + exchange.getResponseHeaders().add("Location", location); + } else { + // the count has hit zero, so ship them off to the destination + exchange.getResponseHeaders().add("Location", "/" + destination + "/"); + } + exchange.sendResponseHeaders(302, 0); + } catch (Exception e) { + fail(e); + } + }); + server.start(); + } + + @AfterClass + public static void stopServer() { + server.stop(0); + } + + private static String url(final String path) { + String hostname = server.getAddress().getHostString(); + int port = server.getAddress().getPort(); + return "http://" + hostname + ":" + port + path; + } + + private static String bytesToString(final byte[] bytes) { + return new String(bytes, StandardCharsets.UTF_8); + } + + public void testGetBytes() throws Exception { + HttpClient client = new HttpClient(); + String u = url("/hello/"); + String response = bytesToString(client.getBytes(u)); + assertThat(response, equalTo("hello world")); + } + + public void testGetBytes404() { + HttpClient client = new HttpClient(); + String u = url("/404/"); + Exception e = expectThrows(ResourceNotFoundException.class, () -> client.getBytes(u)); + assertThat(e.getMessage(), equalTo(u + " not found")); + } + + public void testRedirect() throws Exception { + HttpClient client = new HttpClient(); + String u = url("/redirect/3/hello/"); + String response = bytesToString(client.getBytes(u)); + assertThat(response, equalTo("hello world")); + } + + public void testRelativeRedirect() throws Exception { + HttpClient client = new HttpClient(); + String u = url("/redirect"); + String response = bytesToString(client.getBytes(u)); + assertThat(response, equalTo("hello world")); + } + + public void testRedirectTo404() { + HttpClient client = new HttpClient(); + String u = url("/redirect/5/404/"); + Exception e = expectThrows(ResourceNotFoundException.class, () -> client.getBytes(u)); + assertThat(e.getMessage(), equalTo(u + " not found")); + } + + public void testTooManyRedirects() { + HttpClient client = new HttpClient(); + String u = url("/redirect/100/hello/"); + Exception e = expectThrows(IllegalStateException.class, () -> client.getBytes(u)); + assertThat(e.getMessage(), equalTo("too many redirects connection to [" + u + "]")); + } + + public void testGetBytes401() { + HttpClient client = new HttpClient(); + String u = url("/auth/"); + { + Exception e = expectThrows(ElasticsearchStatusException.class, () -> client.getBytes(u)); + assertThat(e.getMessage(), equalTo("error during downloading " + u)); + } + { + PasswordAuthentication auth = client.auth("bad", "credentials"); + Exception e = expectThrows(ElasticsearchStatusException.class, () -> client.getBytes(auth, u)); + assertThat(e.getMessage(), equalTo("error during downloading " + u)); + } + } + + public void testGetBytesWithAuth() throws Exception { + HttpClient client = new HttpClient(); + String u = url("/auth/"); + PasswordAuthentication auth = client.auth("user", "pass"); + String response = bytesToString(client.getBytes(auth, u)); + assertThat(response, equalTo("super secret hello world")); + } + + public void testRedirectToAuth() throws Exception { + HttpClient client = 
new HttpClient();
+        String u = url("/redirect/3/auth/");
+        {
+            Exception e = expectThrows(ElasticsearchStatusException.class, () -> client.getBytes(u));
+            assertThat(e.getMessage(), equalTo("error during downloading " + u));
+        }
+        {
+            PasswordAuthentication auth = client.auth("bad", "credentials");
+            Exception e = expectThrows(ElasticsearchStatusException.class, () -> client.getBytes(auth, u));
+            assertThat(e.getMessage(), equalTo("error during downloading " + u));
+        }
+        {
+            PasswordAuthentication auth = client.auth("user", "pass");
+            String response = bytesToString(client.getBytes(auth, u));
+            assertThat(response, equalTo("super secret hello world"));
+        }
+    }
+}

From 6dbf8d59e578c9cd4cea77b3f326aeaf9c49758b Mon Sep 17 00:00:00 2001
From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com>
Date: Tue, 2 Jul 2024 10:00:03 -0700
Subject: [PATCH 120/216] Avoid possible flaky builds (#110301)

* Segregate sys prop dependent tests by gradle tasks

* Add dependency to gradle check task + style

* Update server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java

Co-authored-by: Yang Wang

---------

Co-authored-by: Yang Wang
---
 server/build.gradle                           | 12 +++++++++++
 .../elasticsearch/index/IndexSettings.java    |  8 +++----
 .../index/IndexSettingsOverrideTests.java     | 21 ++++++-------------
 3 files changed, 21 insertions(+), 20 deletions(-)

diff --git a/server/build.gradle b/server/build.gradle
index 48a4febfc6cdf..e62abed2bc75a 100644
--- a/server/build.gradle
+++ b/server/build.gradle
@@ -148,6 +148,18 @@ if (BuildParams.isSnapshotBuild() == false) {
 tasks.named("test").configure {
   systemProperty 'es.insecure_network_trace_enabled', 'true'
+  excludes << '**/IndexSettingsOverrideTests.class'
+}
+
+TaskProvider indexSettingsOverrideTest = tasks.register("indexSettingsOverrideTest", Test) {
+  include '**/IndexSettingsOverrideTests.class'
+  systemProperty 'es.stateless.allow.index.refresh_interval.override', 'true'
+  classpath = sourceSets.test.runtimeClasspath
+  testClassesDirs = sourceSets.test.output.classesDirs
+}
+
+tasks.named("check").configure {
+  dependsOn(indexSettingsOverrideTest)
 }

 tasks.named("thirdPartyAudit").configure {
diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java
index 1e718fba0d08d..944d50f7ea06c 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.common.time.DateUtils;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.common.util.LazyInitializable;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.ingest.IngestService;
@@ -297,9 +296,8 @@ public final class IndexSettings {

     static class RefreshIntervalValidator implements Setting.Validator {

         static final String STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE = "es.stateless.allow.index.refresh_interval.override";
-
-        private LazyInitializable isOverrideAllowed = new LazyInitializable<>(
-            () -> Boolean.parseBoolean(System.getProperty(STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE, "false"))
+        private static final boolean IS_OVERRIDE_ALLOWED = Boolean.parseBoolean(
+            System.getProperty(STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE, "false")
         );
@@ -317,7 +315,7 @@ public void validate(final TimeValue value, final Map<Setting<?>, Object>
settin && value.compareTo(STATELESS_MIN_NON_FAST_REFRESH_INTERVAL) < 0 && indexVersion.after(IndexVersions.V_8_10_0)) { - if (isOverrideAllowed.getOrCompute() == false) { + if (IS_OVERRIDE_ALLOWED == false) { throw new IllegalArgumentException( "index setting [" + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey() diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java b/server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java index e4f87805f2c4f..307c770d22122 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingsOverrideTests.java @@ -10,17 +10,12 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; -import org.junit.AfterClass; -import org.junit.BeforeClass; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_VERSION_CREATED; import static org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING; -@ESTestCase.WithoutSecurityManager -@SuppressForbidden(reason = "manipulates system properties for testing") public class IndexSettingsOverrideTests extends ESTestCase { public static IndexMetadata newIndexMeta(String name, Settings indexSettings) { @@ -29,12 +24,13 @@ public static IndexMetadata newIndexMeta(String name, Settings indexSettings) { .build(); } - @BeforeClass - public static void setSystemProperty() { - System.setProperty(IndexSettings.RefreshIntervalValidator.STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE, "true"); - } - public void testStatelessMinRefreshIntervalOverride() { + assumeTrue( + "This test depends on system property configured in build.gradle", + Boolean.parseBoolean( + System.getProperty(IndexSettings.RefreshIntervalValidator.STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE, "false") + ) + ); IndexMetadata metadata = newIndexMeta( "index", Settings.builder() @@ -47,9 +43,4 @@ public void testStatelessMinRefreshIntervalOverride() { IndexSettings settings = new IndexSettings(metadata, Settings.EMPTY); assertEquals(TimeValue.timeValueSeconds(1), settings.getRefreshInterval()); } - - @AfterClass - public static void clearSystemProperty() { - System.clearProperty(IndexSettings.RefreshIntervalValidator.STATELESS_ALLOW_INDEX_REFRESH_INTERVAL_OVERRIDE); - } } From babd10b1426fc178cdf3028a067f0dbef6e495a8 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Tue, 2 Jul 2024 13:01:44 -0400 Subject: [PATCH 121/216] Add an index setting that captures the version of the failure store definition (#110354) * Add an index setting that captures the version of the failure store definition * I wish my PRs would precommit-fix themselves --------- Co-authored-by: Elastic Machine --- .../rest-api-spec/test/data_stream/10_basic.yml | 4 +++- .../metadata/DataStreamFailureStoreDefinition.java | 9 +++++++++ .../common/settings/IndexScopedSettings.java | 2 ++ .../metadata/MetadataCreateDataStreamServiceTests.java | 6 ++++++ 4 files changed, 20 insertions(+), 1 deletion(-) diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 609b0c3d0c33c..35e3f38d55c26 100644 --- 
a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -211,7 +211,7 @@ setup: "Create data stream with failure store": - requires: cluster_features: ["gte_v8.15.0"] - reason: "data stream failure stores REST structure changed in 8.15+" + reason: "data stream failure stores default settings changed in 8.15+" - do: ingest.put_pipeline: @@ -368,6 +368,7 @@ setup: expand_wildcards: hidden - is_false: .$fsidx0name.settings.index.default_pipeline - is_false: .$fsidx0name.settings.index.final_pipeline + - is_true: .$fsidx0name.settings.index.failure_store.version - do: indices.get_settings: @@ -382,6 +383,7 @@ setup: expand_wildcards: hidden - is_false: .$fsidx1name.settings.index.default_pipeline - is_false: .$fsidx1name.settings.index.final_pipeline + - is_true: .$fsidx1name.settings.index.failure_store.version - do: indices.delete_data_stream: diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java index e4143f5fe4f35..0927c3d91a71f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreDefinition.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -27,6 +28,13 @@ public class DataStreamFailureStoreDefinition { public static final Settings DATA_STREAM_FAILURE_STORE_SETTINGS; public static final CompressedXContent DATA_STREAM_FAILURE_STORE_MAPPING; + public static final int FAILURE_STORE_DEFINITION_VERSION = 1; + public static final Setting FAILURE_STORE_DEFINITION_VERSION_SETTING = Setting.intSetting( + "index.failure_store.version", + 0, + Setting.Property.IndexScope + ); + static { DATA_STREAM_FAILURE_STORE_SETTINGS = Settings.builder() // Always start with the hidden settings for a backing index. @@ -36,6 +44,7 @@ public class DataStreamFailureStoreDefinition { // meant for the backing indices only. 
.putNull(IndexSettings.DEFAULT_PIPELINE.getKey()) .putNull(IndexSettings.FINAL_PIPELINE.getKey()) + .put(FAILURE_STORE_DEFINITION_VERSION_SETTING.getKey(), FAILURE_STORE_DEFINITION_VERSION) .build(); try { diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index f3eff9ae8838c..fe6616cb4fb8e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.common.settings; +import org.elasticsearch.cluster.metadata.DataStreamFailureStoreDefinition; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -179,6 +180,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexSettings.LIFECYCLE_PARSE_ORIGINATION_DATE_SETTING, IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING, IndexSettings.PREFER_ILM_SETTING, + DataStreamFailureStoreDefinition.FAILURE_STORE_DEFINITION_VERSION_SETTING, // validate that built-in similarities don't get redefined Setting.groupSetting("index.similarity.", (s) -> { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index c900c3257a405..1766674ed42a1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ -255,6 +255,12 @@ public void testCreateDataStreamWithFailureStoreInitialized() throws Exception { assertThat(newState.metadata().index(backingIndexName).isSystem(), is(false)); assertThat(newState.metadata().index(failureStoreIndexName), notNullValue()); assertThat(newState.metadata().index(failureStoreIndexName).getSettings().get("index.hidden"), equalTo("true")); + assertThat( + DataStreamFailureStoreDefinition.FAILURE_STORE_DEFINITION_VERSION_SETTING.get( + newState.metadata().index(failureStoreIndexName).getSettings() + ), + equalTo(DataStreamFailureStoreDefinition.FAILURE_STORE_DEFINITION_VERSION) + ); assertThat(newState.metadata().index(failureStoreIndexName).isSystem(), is(false)); } From 7984b9b224d36ef108595300e3d9575b3195935c Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Jul 2024 18:17:37 +0100 Subject: [PATCH 122/216] Tidy up `SecurityNetty4TransportCloseNotifyIT` (#110311) Some suggestions from a post-merge review of #109899: - Sometimes run on multi-node cluster. - Use `SAFE_AWAIT_TIMEOUT` for waits. 
- Remove unnecessary `assertBusy()` --- .../SecurityNetty4TransportCloseNotifyIT.java | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java index 8a7bd0af817f7..b949d3ea8371a 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportCloseNotifyIT.java @@ -32,13 +32,10 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.SecurityIntegTestCase; import java.util.Collection; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.TimeUnit; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; @@ -46,7 +43,6 @@ import static org.elasticsearch.test.TaskAssertions.assertAllTasksHaveFinished; import static org.elasticsearch.test.rest.ESRestTestCase.basicAuthHeaderValue; -@ClusterScope(numDataNodes = 0, scope = Scope.TEST) public class SecurityNetty4TransportCloseNotifyIT extends SecurityIntegTestCase { @Override @@ -66,7 +62,7 @@ protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), CancellableActionTestPlugin.class); } - Bootstrap setupNettyClient(String node, Consumer responseHandler) throws Exception { + private static Bootstrap setupNettyClient(String node, Consumer responseHandler) throws Exception { var sslCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build(); var httpServer = internalCluster().getInstance(HttpServerTransport.class, node); var remoteAddr = randomFrom(httpServer.boundAddress().boundAddresses()); @@ -97,26 +93,30 @@ protected void channelRead0(ChannelHandlerContext ctx, FullHttpResponse msg) { * After an exchange client sends close_notify and expects the server to close connection. 
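 * (close_notify is the TLS alert for an orderly shutdown of the sender's write side; a server that receives it
 * should close the connection rather than wait for further requests.)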
*/ public void testSendCloseNotifyAfterHttpGetRequests() throws Exception { - var node = internalCluster().startNode(); - var serverRespQueue = new ArrayBlockingQueue(10); - var client = setupNettyClient(node, serverRespQueue::add); + final var nReq = randomIntBetween(0, 10); // nothing particular about number 10 + final var responsesReceivedLatch = new CountDownLatch(nReq); + final var client = setupNettyClient(internalCluster().getRandomNodeName(), response -> { + assertEquals(200, response.status().code()); + responsesReceivedLatch.countDown(); + }); try { var channel = client.connect().sync().channel(); // send some HTTP GET requests before closing a channel - var nReq = randomIntBetween(1, 10); // nothing particular about number 10 for (int i = 0; i < nReq; i++) { - var req = newHttpGetReq("/"); - channel.writeAndFlush(req).get(5, TimeUnit.SECONDS); + channel.write(newHttpGetReq("/")); + if (randomBoolean()) { + channel.flush(); + } } - assertBusy(() -> assertEquals(nReq, serverRespQueue.size())); - assertTrue(serverRespQueue.stream().allMatch(resp -> resp.status().code() == 200)); + channel.flush(); + safeAwait(responsesReceivedLatch); // send close_notify alert and wait for channel closure var sslHandler = channel.pipeline().get(SslHandler.class); sslHandler.closeOutbound(); try { - assertTrue("server must close connection", channel.closeFuture().await(5000)); + assertTrue("server must close connection", channel.closeFuture().await(SAFE_AWAIT_TIMEOUT.millis())); } finally { channel.close().sync(); } @@ -129,7 +129,7 @@ public void testSendCloseNotifyAfterHttpGetRequests() throws Exception { * Ensures that receiving close_notify will close connection and cancel running action. */ public void testSendCloseNotifyCancelAction() throws Exception { - var node = internalCluster().startNode(); + var node = internalCluster().getRandomNodeName(); var indexName = "close-notify-cancel"; createIndex(indexName); ensureGreen(indexName); @@ -143,7 +143,7 @@ public void testSendCloseNotifyCancelAction() throws Exception { var ssl = channel.pipeline().get(SslHandler.class); capturingAction.captureAndCancel(ssl::closeOutbound); try { - assertTrue("server must close connection", channel.closeFuture().await(5000)); + assertTrue("server must close connection", channel.closeFuture().await(SAFE_AWAIT_TIMEOUT.millis())); assertAllTasksHaveFinished(actionName); assertFalse("must cancel action before http response", gotResponse.get()); } finally { From 7b0cf00db4c45d371ee4f2869530429c3e0d7abb Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 2 Jul 2024 19:33:38 +0200 Subject: [PATCH 123/216] semantic_text: avoid nesting for multiple levels (#110344) --- .../index/mapper/DocumentParserContext.java | 3 ++- .../index/mapper/MapperBuilderContext.java | 17 ++++++++++---- .../index/mapper/NestedObjectMapper.java | 9 ++------ .../FieldAliasMapperValidationTests.java | 10 +++++++- .../mapper/MapperBuilderContextTests.java | 7 ++++++ .../index/mapper/NestedObjectMapperTests.java | 17 ++++++++++++++ .../index/mapper/ObjectMapperMergeTests.java | 6 +++-- .../mapper/SemanticTextFieldMapper.java | 3 ++- .../10_semantic_text_field_mapping.yml | 23 ++++++++++++++++++- 9 files changed, 77 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 3afadffe2f0ca..d8fa2919b795f 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -676,7 +676,8 @@ public final MapperBuilderContext createDynamicMapperBuilderContext() { false, containsDimensions, dynamic, - MergeReason.MAPPING_UPDATE + MergeReason.MAPPING_UPDATE, + false ); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java index 5a35cfb11bbe0..ceb1749101d8c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperBuilderContext.java @@ -27,7 +27,7 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat } public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDataStream, MergeReason mergeReason) { - return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC, mergeReason); + return new MapperBuilderContext(null, isSourceSynthetic, isDataStream, false, ObjectMapper.Defaults.DYNAMIC, mergeReason, false); } private final String path; @@ -36,6 +36,7 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat private final boolean parentObjectContainsDimensions; private final ObjectMapper.Dynamic dynamic; private final MergeReason mergeReason; + private final boolean inNestedContext; MapperBuilderContext( String path, @@ -43,7 +44,8 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat boolean isDataStream, boolean parentObjectContainsDimensions, ObjectMapper.Dynamic dynamic, - MergeReason mergeReason + MergeReason mergeReason, + boolean inNestedContext ) { Objects.requireNonNull(dynamic, "dynamic must not be null"); this.path = path; @@ -52,6 +54,7 @@ public static MapperBuilderContext root(boolean isSourceSynthetic, boolean isDat this.parentObjectContainsDimensions = parentObjectContainsDimensions; this.dynamic = dynamic; this.mergeReason = mergeReason; + this.inNestedContext = inNestedContext; } /** @@ -84,7 +87,8 @@ public MapperBuilderContext createChildContext( this.isDataStream, parentObjectContainsDimensions, getDynamic(dynamic), - this.mergeReason + this.mergeReason, + isInNestedContext() ); } @@ -135,7 +139,10 @@ public MergeReason getMergeReason() { return mergeReason; } - public boolean isNested() { - return false; + /** + * Returns true if this context is included in a nested context, either directly or any of its ancestors. 
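+     * The nested scope is inherited: {@code NestedMapperBuilderContext} sets it to true, and every child context
+     * created from such a context reports true as well.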
+     */
+    public boolean isInNestedContext() {
+        return inNestedContext;
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java
index 766fb2a8e574f..76212f9899f5c 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java
@@ -150,7 +150,7 @@ protected static void parseNested(String name, Map node, NestedO
         }
     }

-    private static class NestedMapperBuilderContext extends MapperBuilderContext {
+    static class NestedMapperBuilderContext extends MapperBuilderContext {

         final boolean parentIncludedInRoot;
         final Query nestedTypeFilter;
@@ -164,7 +164,7 @@ private static class NestedMapperBuilderContext extends MapperBuilderContext {
             Dynamic dynamic,
             MapperService.MergeReason mergeReason
         ) {
-            super(path, isSourceSynthetic, isDataStream, parentObjectContainsDimensions, dynamic, mergeReason);
+            super(path, isSourceSynthetic, isDataStream, parentObjectContainsDimensions, dynamic, mergeReason, true);
             this.parentIncludedInRoot = parentIncludedInRoot;
             this.nestedTypeFilter = nestedTypeFilter;
         }
@@ -182,11 +182,6 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) {
                 getMergeReason()
             );
         }
-
-        @Override
-        public boolean isNested() {
-            return true;
-        }
     }

     private final Explicit includeInRoot;
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java
index d6b675ed0eb51..d913b86aed2d5 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java
@@ -159,7 +159,15 @@ public void testFieldAliasWithDifferentNestedScopes() {

     private static FieldMapper createFieldMapper(String parent, String name) {
         return new BooleanFieldMapper.Builder(name, ScriptCompiler.NONE, false, IndexVersion.current()).build(
-            new MapperBuilderContext(parent, false, false, false, ObjectMapper.Defaults.DYNAMIC, MapperService.MergeReason.MAPPING_UPDATE)
+            new MapperBuilderContext(
+                parent,
+                false,
+                false,
+                false,
+                ObjectMapper.Defaults.DYNAMIC,
+                MapperService.MergeReason.MAPPING_UPDATE,
+                false
+            )
         );
     }

diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java
index 8c9197b0f3173..24d070cbd0609 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperBuilderContextTests.java
@@ -27,4 +27,11 @@ public void testRootWithMergeReason() {
         assertEquals(mergeReason, root.getMergeReason());
     }

+    public void testIsInNestedContext() {
+        MapperBuilderContext root = MapperBuilderContext.root(true, false);
+        assertFalse(root.isInNestedContext());
+
+        MapperBuilderContext childContext = root.createChildContext("child", ObjectMapper.Dynamic.FALSE);
+        assertFalse(childContext.isInNestedContext());
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
index 4838fe58a8adb..306887099849b 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
+++ 
b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -1825,4 +1825,21 @@ public void testNestedMapperMergeContextFromConstructor() { assertEquals(isDataStream, nestedBuilderContext.isDataStream()); assertEquals(parentContainsDimensions, nestedBuilderContext.parentObjectContainsDimensions()); } + + public void testIsInNestedContext() { + NestedObjectMapper.NestedMapperBuilderContext context = new NestedObjectMapper.NestedMapperBuilderContext( + "nested_path", + false, + false, + false, + null, + false, + Dynamic.FALSE, + MergeReason.INDEX_TEMPLATE + ); + assertTrue(context.isInNestedContext()); + + MapperBuilderContext childContext = context.createChildContext("child", false, Dynamic.FALSE); + assertTrue(childContext.isInNestedContext()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index da8d662c117c3..b3bb8cbe697a5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -339,7 +339,8 @@ private static ObjectMapper.Builder createObjectSubobjectsFalseLeafWithDots() { false, false, ObjectMapper.Defaults.DYNAMIC, - MapperService.MergeReason.MAPPING_UPDATE + MapperService.MergeReason.MAPPING_UPDATE, + false ) ); assertEquals("host.name", fieldMapper.leafName()); @@ -358,7 +359,8 @@ private ObjectMapper.Builder createObjectSubobjectsFalseLeafWithMultiField() { false, false, ObjectMapper.Defaults.DYNAMIC, - MapperService.MergeReason.MAPPING_UPDATE + MapperService.MergeReason.MAPPING_UPDATE, + false ) ); assertEquals("host.name", textKeywordMultiField.leafName()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 81e3dc9d6adc7..b9b95afbf6dc6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -153,7 +153,8 @@ public SemanticTextFieldMapper build(MapperBuilderContext context) { throw new IllegalArgumentException(CONTENT_TYPE + " field [" + leafName() + "] does not support multi-fields"); } final String fullName = context.buildFullName(leafName()); - if (context.isNested()) { + + if (context.isInNestedContext()) { throw new IllegalArgumentException(CONTENT_TYPE + " field [" + fullName + "] cannot be nested"); } var childContext = context.createChildContext(leafName(), ObjectMapper.Dynamic.FALSE); diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml index d177ce08b3847..d7f7e21e6f428 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml @@ -157,7 +157,7 @@ setup: type: keyword --- -"Cannot be used as a nested field": +"Cannot be used directly as a nested field": - do: catch: /semantic_text field \[nested.semantic\] cannot be nested/ @@ -175,3 +175,24 @@ setup: 
another_field: type: keyword +--- +"Cannot be used as a nested field on nested objects": + + - do: + catch: /semantic_text field \[nested.nested_object.semantic\] cannot be nested/ + indices.create: + index: test-nested-index + body: + mappings: + properties: + nested: + type: nested + properties: + nested_object: + type: object + properties: + semantic: + type: semantic_text + inference_id: sparse-inference-id + another_field: + type: keyword From 7a1d532ffb08a86e644bc946d6d3f2dd7108904d Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Tue, 2 Jul 2024 13:37:25 -0400 Subject: [PATCH 124/216] Pass over Sparse Vector docs for correctness (#110282) * Remove legacy mentions of text expansion queries * Add missing query_vector param to sparse_vector query docs * Fix formatting errors in sparse vector query dsl doc * Remove unnecessary test setup block --- .../ingest/search-nlp-tutorial.asciidoc | 1 - .../mapping/types/sparse-vector.asciidoc | 2 +- .../query-dsl/sparse-vector-query.asciidoc | 22 +++-- .../semantic-search-elser.asciidoc | 2 +- .../search-your-data/semantic-search.asciidoc | 88 ++++++++----------- 5 files changed, 52 insertions(+), 63 deletions(-) diff --git a/docs/reference/ingest/search-nlp-tutorial.asciidoc b/docs/reference/ingest/search-nlp-tutorial.asciidoc index d5eacb6951023..afdceeeb8bac2 100644 --- a/docs/reference/ingest/search-nlp-tutorial.asciidoc +++ b/docs/reference/ingest/search-nlp-tutorial.asciidoc @@ -256,4 +256,3 @@ In this guide, we covered how to: * {ml-docs}/ml-nlp-deploy-models.html[Deploying a model ML guide^] * {ml-docs}/ml-nlp-import-model.html#ml-nlp-authentication[Eland Authentication methods^] * <> -// * <> diff --git a/docs/reference/mapping/types/sparse-vector.asciidoc b/docs/reference/mapping/types/sparse-vector.asciidoc index a382753cb6ed3..d0c2c83b8a8fa 100644 --- a/docs/reference/mapping/types/sparse-vector.asciidoc +++ b/docs/reference/mapping/types/sparse-vector.asciidoc @@ -94,6 +94,6 @@ Negative values will be rejected. NOTE: `sparse_vector` fields do not support querying, sorting or aggregating. They may only be used within specialized queries. The recommended query to use on these fields are <> queries. -They may also be used within <> queries. +They may also be used within legacy <> queries. NOTE: `sparse_vector` fields only preserve 9 significant bits for the precision, which translates to a relative error of about 0.4%. diff --git a/docs/reference/query-dsl/sparse-vector-query.asciidoc b/docs/reference/query-dsl/sparse-vector-query.asciidoc index 9a269ad9712a8..80616ff174e36 100644 --- a/docs/reference/query-dsl/sparse-vector-query.asciidoc +++ b/docs/reference/query-dsl/sparse-vector-query.asciidoc @@ -57,27 +57,33 @@ GET _search [[sparse-vector-field-params]] === Top level parameters for `sparse_vector` -``::: -(Required, object) The name of the field that contains the token-weight pairs to be searched against. +`field`:: +(Required, string) The name of the field that contains the token-weight pairs to be searched against. -`inference_id`:::: +`inference_id`:: (Optional, string) The <> to use to convert the query text into token-weight pairs. It must be the same inference ID that was used to create the tokens from the input text. Only one of `inference_id` and `query_vector` is allowed. If `inference_id` is specified, `query` must also be specified. -`query`:::: +`query`:: (Optional, string) The query text you want to use for search. If `inference_id` is specified, `query` must also be specified. 
+If `query_vector` is specified, `query` must not be specified. -`prune` :::: +`query_vector`:: +(Optional, dictionary) A dictionary of token-weight pairs representing the precomputed query vector to search. +Searching using this query vector will bypass additional inference. +Only one of `inference_id` and `query_vector` is allowed. + +`prune` :: (Optional, boolean) preview:[] Whether to perform pruning, omitting the non-significant tokens from the query to improve query performance. If `prune` is true but the `pruning_config` is not specified, pruning will occur but default values will be used. Default: false. -`pruning_config` :::: +`pruning_config` :: (Optional, object) preview:[] Optional pruning configuration. @@ -86,7 +92,7 @@ This is only used if `prune` is set to `true`. If `prune` is set to `true` but `pruning_config` is not specified, default values will be used. + -- -Parameters for `` are: +Parameters for `pruning_config` are: `tokens_freq_ratio_threshold`:: (Optional, integer) @@ -285,3 +291,5 @@ GET my-index/_search //TEST[skip: Requires inference] NOTE: When performing <>, inference is performed on the local cluster. + + diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index bf700eb7b1ff4..11aec59a00b30 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ -325,7 +325,7 @@ PUT my-index [NOTE] ==== -Depending on your data, the text expansion query may be faster with `track_total_hits: false`. +Depending on your data, the `sparse_vector` query may be faster with `track_total_hits: false`. ==== [discrete] diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index 2d776077e13c5..fa84c3848b78c 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -1,13 +1,12 @@ [[semantic-search]] == Semantic search -Semantic search is a search method that helps you find data based on the intent and contextual meaning of a search query, instead of a match on query terms -(lexical search). +Semantic search is a search method that helps you find data based on the intent and contextual meaning of a search query, instead of a match on query terms (lexical search). {es} provides various semantic search capabilities using {ml-docs}/ml-nlp.html[natural language processing (NLP)] and vector search. Using an NLP model enables you to extract text embeddings out of text. Embeddings are vectors that provide a numeric representation of a text. -Pieces of content with similar meaning have similar representations. +Pieces of content with similar meaning have similar representations. NLP models can be used in the {stack} various ways, you can: * deploy models in {es} @@ -29,44 +28,32 @@ IMPORTANT: For the easiest way to perform semantic search in the {stack}, refer [[semantic-search-select-nlp-model]] === Select an NLP model -{es} offers the usage of a -{ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-embedding[wide range of NLP models], -including both dense and sparse vector models. Your choice of the language model -is critical for implementing semantic search successfully. - -While it is possible to bring your own text embedding model, achieving good -search results through model tuning is challenging. 
Selecting an appropriate -model from our third-party model list is the first step. Training the model on -your own data is essential to ensure better search results than using only BM25. -However, the model training process requires a team of data scientists and ML -experts, making it expensive and time-consuming. - -To address this issue, Elastic provides a pre-trained representational model -called {ml-docs}/ml-nlp-elser.html[Elastic Learned Sparse EncodeR (ELSER)]. -ELSER, currently available only for English, is an out-of-domain sparse vector -model that does not require fine-tuning. This adaptability makes it suitable for -various NLP use cases out of the box. Unless you have a team of ML specialists, -it is highly recommended to use the ELSER model. - -In the case of sparse vector representation, the vectors mostly consist of zero -values, with only a small subset containing non-zero values. This representation -is commonly used for textual data. In the case of ELSER, each document in an -index and the query text itself are represented by high-dimensional sparse -vectors. Each non-zero element of the vector corresponds to a term in the model -vocabulary. The ELSER vocabulary contains around 30000 terms, so the sparse -vectors created by ELSER contain about 30000 values, the majority of which are -zero. Effectively the ELSER model is replacing the terms in the original query -with other terms that have been learnt to exist in the documents that best match -the original search terms in a training dataset, and weights to control how -important each is. +{es} offers the usage of a +{ml-docs}/ml-nlp-model-ref.html#ml-nlp-model-ref-text-embedding[wide range of NLP models], including both dense and sparse vector models. +Your choice of the language model is critical for implementing semantic search successfully. +While it is possible to bring your own text embedding model, achieving good search results through model tuning is challenging. +Selecting an appropriate model from our third-party model list is the first step. +Training the model on your own data is essential to ensure better search results than using only BM25. +However, the model training process requires a team of data scientists and ML experts, making it expensive and time-consuming. + +To address this issue, Elastic provides a pre-trained representational model called {ml-docs}/ml-nlp-elser.html[Elastic Learned Sparse EncodeR (ELSER)]. +ELSER, currently available only for English, is an out-of-domain sparse vector model that does not require fine-tuning. +This adaptability makes it suitable for various NLP use cases out of the box. +Unless you have a team of ML specialists, it is highly recommended to use the ELSER model. + +In the case of sparse vector representation, the vectors mostly consist of zero values, with only a small subset containing non-zero values. +This representation is commonly used for textual data. +In the case of ELSER, each document in an index and the query text itself are represented by high-dimensional sparse vectors. +Each non-zero element of the vector corresponds to a term in the model vocabulary. +The ELSER vocabulary contains around 30000 terms, so the sparse vectors created by ELSER contain about 30000 values, the majority of which are zero. +Effectively the ELSER model is replacing the terms in the original query with other terms that have been learnt to exist in the documents that best match the original search terms in a training dataset, and weights to control how important each is. 
[discrete] [[semantic-search-deploy-nlp-model]] === Deploy the model -After you decide which model you want to use for implementing semantic search, -you need to deploy the model in {es}. +After you decide which model you want to use for implementing semantic search, you need to deploy the model in {es}. include::{es-ref-dir}/tab-widgets/semantic-search/deploy-nlp-model-widget.asciidoc[] @@ -74,9 +61,8 @@ include::{es-ref-dir}/tab-widgets/semantic-search/deploy-nlp-model-widget.asciid [[semantic-search-field-mappings]] === Map a field for the text embeddings -Before you start using the deployed model to generate embeddings based on your -input text, you need to prepare your index mapping first. The mapping of the -index depends on the type of model. +Before you start using the deployed model to generate embeddings based on your input text, you need to prepare your index mapping first. +The mapping of the index depends on the type of model. include::{es-ref-dir}/tab-widgets/semantic-search/field-mappings-widget.asciidoc[] @@ -84,14 +70,12 @@ include::{es-ref-dir}/tab-widgets/semantic-search/field-mappings-widget.asciidoc [[semantic-search-generate-embeddings]] === Generate text embeddings -Once you have created the mappings for the index, you can generate text -embeddings from your input text. This can be done by using an -<> with an <>. -The ingest pipeline processes the input data and indexes it into the destination -index. At index time, the inference ingest processor uses the trained model to -infer against the data ingested through the pipeline. After you created the -ingest pipeline with the inference processor, you can ingest your data through -it to generate the model output. +Once you have created the mappings for the index, you can generate text embeddings from your input text. +This can be done by using an +<> with an <>. +The ingest pipeline processes the input data and indexes it into the destination index. +At index time, the inference ingest processor uses the trained model to infer against the data ingested through the pipeline. +After you created the ingest pipeline with the inference processor, you can ingest your data through it to generate the model output. include::{es-ref-dir}/tab-widgets/semantic-search/generate-embeddings-widget.asciidoc[] @@ -101,8 +85,7 @@ Now it is time to perform semantic search! [[semantic-search-search]] === Search the data -Depending on the type of model you have deployed, you can query rank features -with a text expansion query, or dense vectors with a kNN search. +Depending on the type of model you have deployed, you can query rank features with a <> query, or dense vectors with a kNN search. include::{es-ref-dir}/tab-widgets/semantic-search/search-widget.asciidoc[] @@ -110,13 +93,12 @@ include::{es-ref-dir}/tab-widgets/semantic-search/search-widget.asciidoc[] [[semantic-search-hybrid-search]] === Beyond semantic search with hybrid search -In some situations, lexical search may perform better than semantic search. For -example, when searching for single words or IDs, like product numbers. +In some situations, lexical search may perform better than semantic search. +For example, when searching for single words or IDs, like product numbers. Combining semantic and lexical search into one hybrid search request using -<> provides the best of both worlds. Not only that, -but hybrid search using reciprocal rank fusion {blog-ref}improving-information-retrieval-elastic-stack-hybrid[has been shown to perform better -in general]. 
+<> provides the best of both worlds. +Not only that, but hybrid search using reciprocal rank fusion {blog-ref}improving-information-retrieval-elastic-stack-hybrid[has been shown to perform better in general]. include::{es-ref-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[] From 10462143772e8aa017a5865e8795c0f8dfa41a96 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 2 Jul 2024 19:38:27 +0200 Subject: [PATCH 125/216] Fix https://github.com/elastic/elasticsearch/issues/110212 (#110392) Closes https://github.com/elastic/elasticsearch/issues/110212 Fixes an ArrayIndexOutOfBoundsException when choosing a random element from the created synonyms. --- muted-tests.yml | 3 --- .../elasticsearch/synonyms/SynonymsManagementAPIServiceIT.java | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f91aa7c7173ec..376ab0164c314 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -68,9 +68,6 @@ tests: - class: org.elasticsearch.index.store.FsDirectoryFactoryTests method: testPreload issue: https://github.com/elastic/elasticsearch/issues/110211 -- class: org.elasticsearch.synonyms.SynonymsManagementAPIServiceIT - method: testUpdateRuleWithMaxSynonyms - issue: https://github.com/elastic/elasticsearch/issues/110212 - class: "org.elasticsearch.rest.RestControllerIT" issue: "https://github.com/elastic/elasticsearch/issues/110225" - class: "org.elasticsearch.xpack.security.authz.store.NativePrivilegeStoreCacheTests" diff --git a/server/src/internalClusterTest/java/org/elasticsearch/synonyms/SynonymsManagementAPIServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/synonyms/SynonymsManagementAPIServiceIT.java index b12869b767fd8..f6392954912ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/synonyms/SynonymsManagementAPIServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/synonyms/SynonymsManagementAPIServiceIT.java @@ -194,7 +194,7 @@ public void onResponse(SynonymsManagementAPIService.SynonymsReloadResult synonym // Updating a rule fails synonymsManagementAPIService.putSynonymRule( synonymSetId, - synonymsSet[randomIntBetween(0, maxSynonymSets)], + synonymsSet[randomIntBetween(0, maxSynonymSets - 1)], new ActionListener<>() { @Override public void onResponse(SynonymsManagementAPIService.SynonymsReloadResult synonymsReloadResult) { From 6312bd8139a514a82d2d030066f82381686b2bf6 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 2 Jul 2024 19:45:27 +0200 Subject: [PATCH 126/216] Stop using ReleasableLock in Translog (#107600) As discussed in #107555 there's some overhead to the use of `ReleasableLock`, and it's probably not a good idea to use it on the hot(ish) path in this class.
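To make the change concrete, here is a minimal sketch of the before/after locking pattern. The holder class and field below are hypothetical; only the lock handling mirrors the diff that follows.

```
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Hypothetical holder class; only the lock-handling pattern reflects this PR.
class LockPatternSketch {
    private final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
    private final Lock readLock = rwl.readLock();
    private long sizeInBytes;

    long currentSize() {
        // Before, the field was a ReleasableLock so it could be used in
        // try-with-resources:
        //
        //     try (ReleasableLock ignored = readLock.acquire()) { return sizeInBytes; }
        //
        // After, it is a plain j.u.c. Lock with an explicit try/finally,
        // which skips the wrapper object and its extra indirection:
        readLock.lock();
        try {
            return sizeInBytes;
        } finally {
            readLock.unlock();
        }
    }
}
```

The try/finally form is more verbose, but it keeps lock acquisition free of the wrapper-object overhead on every call.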
--- .../index/translog/Translog.java | 209 ++++++++++++------ .../index/translog/TranslogTests.java | 6 +- 2 files changed, 145 insertions(+), 70 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index fb0f1ec4b4a51..a079a852021bd 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; @@ -52,7 +51,7 @@ import java.util.Objects; import java.util.OptionalLong; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.LongConsumer; import java.util.function.LongSupplier; @@ -109,8 +108,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC private final List readers = new ArrayList<>(); private final BigArrays bigArrays; private final DiskIoBufferPool diskIoBufferPool; - protected final ReleasableLock readLock; - protected final ReleasableLock writeLock; + protected final Lock readLock; + protected final Lock writeLock; private final Path location; private TranslogWriter current; @@ -162,9 +161,9 @@ public Translog( this.translogUUID = translogUUID; this.bigArrays = config.getBigArrays(); this.diskIoBufferPool = config.getDiskIoBufferPool(); - ReadWriteLock rwl = new ReentrantReadWriteLock(); - this.readLock = new ReleasableLock(rwl.readLock()); - this.writeLock = new ReleasableLock(rwl.writeLock()); + var rwl = new ReentrantReadWriteLock(); + this.readLock = rwl.readLock(); + this.writeLock = rwl.writeLock(); this.location = config.getTranslogPath(); Files.createDirectories(this.location); @@ -224,7 +223,8 @@ public Translog( private ArrayList recoverFromFiles(Checkpoint checkpoint) throws IOException { boolean success = false; ArrayList foundTranslogs = new ArrayList<>(); - try (ReleasableLock ignored = writeLock.acquire()) { + writeLock.lock(); + try { logger.debug("open uncommitted translog checkpoint {}", checkpoint); final long minGenerationToRecoverFrom = checkpoint.minTranslogGeneration; @@ -295,6 +295,7 @@ private ArrayList recoverFromFiles(Checkpoint checkpoint) throws if (success == false) { IOUtils.closeWhileHandlingException(foundTranslogs); } + writeLock.unlock(); } return foundTranslogs; } @@ -384,13 +385,15 @@ public void close() throws IOException { assert calledFromOutsideOrViaTragedyClose() : "Translog.close method is called from inside Translog, but not via closeOnTragicEvent method"; if (closed.compareAndSet(false, true)) { - try (ReleasableLock lock = writeLock.acquire()) { + writeLock.lock(); + try { try { current.sync(); } finally { closeFilesIfNoPendingRetentionLocks(); } } finally { + writeLock.unlock(); logger.debug("translog closed"); } } @@ -409,8 +412,11 @@ public Path location() { * Returns the generation of the current transaction log. 
*/ public long currentFileGeneration() { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { return current.getGeneration(); + } finally { + readLock.unlock(); } } @@ -418,7 +424,8 @@ public long currentFileGeneration() { * Returns the minimum file generation referenced by the translog */ public long getMinFileGeneration() { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { if (readers.isEmpty()) { return current.getGeneration(); } else { @@ -426,6 +433,8 @@ public long getMinFileGeneration() { : "the first translog isn't the one with the minimum generation:" + readers; return readers.get(0).getGeneration(); } + } finally { + readLock.unlock(); } } @@ -444,11 +453,14 @@ public long sizeInBytes() { } long earliestLastModifiedAge() { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { ensureOpen(); return findEarliestLastModifiedAge(System.currentTimeMillis(), readers, current); } catch (IOException e) { throw new TranslogException(shardId, "Unable to get the earliest last modified time for the transaction log"); + } finally { + readLock.unlock(); } } @@ -467,12 +479,15 @@ static long findEarliestLastModifiedAge(long currentTime, Iterable r.getGeneration() >= minGeneration) .mapToInt(BaseTranslogReader::totalOperations) .sum(); + } finally { + readLock.unlock(); } } @@ -480,9 +495,15 @@ public int totalOperationsByMinGen(long minGeneration) { * Returns the number of operations in the transaction files that contain operations with seq# above the given number. */ public int estimateTotalOperationsFromMinSeq(long minSeqNo) { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { ensureOpen(); - return readersAboveMinSeqNo(minSeqNo).mapToInt(BaseTranslogReader::totalOperations).sum(); + return Stream.concat(readers.stream(), Stream.of(current)) + .filter(reader -> minSeqNo <= reader.getCheckpoint().maxEffectiveSeqNo()) + .mapToInt(BaseTranslogReader::totalOperations) + .sum(); + } finally { + readLock.unlock(); } } @@ -490,12 +511,15 @@ public int estimateTotalOperationsFromMinSeq(long minSeqNo) { * Returns the size in bytes of the translog files at least the given generation */ public long sizeInBytesByMinGen(long minGeneration) { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { ensureOpen(); return Stream.concat(readers.stream(), Stream.of(current)) .filter(r -> r.getGeneration() >= minGeneration) .mapToLong(BaseTranslogReader::sizeInBytes) .sum(); + } finally { + readLock.unlock(); } } @@ -576,7 +600,8 @@ public Location add(final Operation operation) throws IOException { try (ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(bigArrays)) { writeOperationWithSize(out, operation); final BytesReference bytes = out.bytes(); - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { ensureOpen(); if (operation.primaryTerm() > current.getPrimaryTerm()) { assert false @@ -596,6 +621,8 @@ public Location add(final Operation operation) throws IOException { ); } return current.add(bytes, operation.seqNo()); + } finally { + readLock.unlock(); } } catch (final AlreadyClosedException | IOException ex) { closeOnTragicEvent(ex); @@ -615,8 +642,11 @@ public Location add(final Operation operation) throws IOException { */ public boolean shouldRollGeneration() { final long threshold = this.indexSettings.getGenerationThresholdSize().getBytes(); - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { return 
this.current.sizeInBytes() > threshold; + } finally { + readLock.unlock(); } } @@ -625,13 +655,16 @@ public boolean shouldRollGeneration() { * can be returned by the next write. */ public Location getLastWriteLocation() { - try (ReleasableLock lock = readLock.acquire()) { + readLock.lock(); + try { /* * We use position = current - 1 and size = Integer.MAX_VALUE here instead of position current and size = 0 for two reasons: * 1. Translog.Location's compareTo doesn't actually pay attention to size even though it's equals method does. * 2. It feels more right to return a *position* that is before the next write's position rather than rely on the size. */ return new Location(current.generation, current.sizeInBytes() - 1, Integer.MAX_VALUE); + } finally { + readLock.unlock(); } } @@ -645,8 +678,11 @@ public long getLastSyncedGlobalCheckpoint() { } final Checkpoint getLastSyncedCheckpoint() { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { return current.getLastSyncedCheckpoint(); + } finally { + readLock.unlock(); } } @@ -665,7 +701,8 @@ public Snapshot newSnapshot() throws IOException { public Snapshot newSnapshot(long fromSeqNo, long toSeqNo) throws IOException { assert fromSeqNo <= toSeqNo : fromSeqNo + " > " + toSeqNo; assert fromSeqNo >= 0 : "from_seq_no must be non-negative " + fromSeqNo; - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { ensureOpen(); TranslogSnapshot[] snapshots = Stream.concat(readers.stream(), Stream.of(current)) .filter(reader -> reader.getCheckpoint().minSeqNo <= toSeqNo && fromSeqNo <= reader.getCheckpoint().maxEffectiveSeqNo()) @@ -673,6 +710,8 @@ public Snapshot newSnapshot(long fromSeqNo, long toSeqNo) throws IOException { .toArray(TranslogSnapshot[]::new); final Snapshot snapshot = newMultiSnapshot(snapshots); return new SeqNoFilterSnapshot(snapshot, fromSeqNo, toSeqNo); + } finally { + readLock.unlock(); } } @@ -681,23 +720,28 @@ public Snapshot newSnapshot(long fromSeqNo, long toSeqNo) throws IOException { * this method will return null. 
*/ public Operation readOperation(Location location) throws IOException { - try (ReleasableLock ignored = readLock.acquire()) { - ensureOpen(); - if (location.generation < getMinFileGeneration()) { - return null; - } - if (current.generation == location.generation) { - // no need to fsync here the read operation will ensure that buffers are written to disk - // if they are still in RAM and we are reading onto that position - return current.read(location); - } else { - // read backwards - it's likely we need to read on that is recent - for (int i = readers.size() - 1; i >= 0; i--) { - TranslogReader translogReader = readers.get(i); - if (translogReader.generation == location.generation) { - return translogReader.read(location); + try { + readLock.lock(); + try { + ensureOpen(); + if (location.generation < getMinFileGeneration()) { + return null; + } + if (current.generation == location.generation) { + // no need to fsync here the read operation will ensure that buffers are written to disk + // if they are still in RAM and we are reading onto that position + return current.read(location); + } else { + // read backwards - it's likely we need to read on that is recent + for (int i = readers.size() - 1; i >= 0; i--) { + TranslogReader translogReader = readers.get(i); + if (translogReader.generation == location.generation) { + return translogReader.read(location); + } } } + } finally { + readLock.unlock(); } } catch (final Exception ex) { closeOnTragicEvent(ex); @@ -727,24 +771,17 @@ private Snapshot newMultiSnapshot(TranslogSnapshot[] snapshots) throws IOExcepti } } - private Stream readersAboveMinSeqNo(long minSeqNo) { - assert readLock.isHeldByCurrentThread() || writeLock.isHeldByCurrentThread() - : "callers of readersAboveMinSeqNo must hold a lock: readLock [" - + readLock.isHeldByCurrentThread() - + "], writeLock [" - + readLock.isHeldByCurrentThread() - + "]"; - return Stream.concat(readers.stream(), Stream.of(current)).filter(reader -> minSeqNo <= reader.getCheckpoint().maxEffectiveSeqNo()); - } - /** * Acquires a lock on the translog files, preventing them from being trimmed */ public Closeable acquireRetentionLock() { - try (ReleasableLock lock = readLock.acquire()) { + readLock.lock(); + try { ensureOpen(); final long viewGen = getMinFileGeneration(); return acquireTranslogGenFromDeletionPolicy(viewGen); + } finally { + readLock.unlock(); } } @@ -764,9 +801,14 @@ private Closeable acquireTranslogGenFromDeletionPolicy(long viewGen) { * Sync's the translog. */ public void sync() throws IOException { - try (ReleasableLock lock = readLock.acquire()) { - if (closed.get() == false) { - current.sync(); + try { + readLock.lock(); + try { + if (closed.get() == false) { + current.sync(); + } + } finally { + readLock.unlock(); } } catch (final Exception ex) { closeOnTragicEvent(ex); @@ -778,8 +820,11 @@ public void sync() throws IOException { * Returns true if an fsync is required to ensure durability of the translogs operations or it's metadata. 
*/ public boolean syncNeeded() { - try (ReleasableLock lock = readLock.acquire()) { + readLock.lock(); + try { return current.syncNeeded(); + } finally { + readLock.unlock(); } } @@ -799,7 +844,8 @@ static String getCommitCheckpointFileName(long generation) { public void trimOperations(long belowTerm, long aboveSeqNo) throws IOException { assert aboveSeqNo >= SequenceNumbers.NO_OPS_PERFORMED : "aboveSeqNo has to a valid sequence number"; - try (ReleasableLock lock = writeLock.acquire()) { + writeLock.lock(); + try { ensureOpen(); if (current.getPrimaryTerm() < belowTerm) { throw new IllegalArgumentException( @@ -831,6 +877,8 @@ public void trimOperations(long belowTerm, long aboveSeqNo) throws IOException { this.readers.clear(); this.readers.addAll(newReaders); + } finally { + writeLock.unlock(); } } @@ -840,13 +888,19 @@ public void trimOperations(long belowTerm, long aboveSeqNo) throws IOException { * @return Returns true iff this call caused an actual sync operation otherwise false */ public boolean ensureSynced(Location location, long globalCheckpoint) throws IOException { - try (ReleasableLock lock = readLock.acquire()) { - // if we have a new generation and the persisted global checkpoint is greater than or equal to the sync global checkpoint it's - // already synced - long persistedGlobalCheckpoint = current.getLastSyncedCheckpoint().globalCheckpoint; - if (location.generation == current.getGeneration() || persistedGlobalCheckpoint < globalCheckpoint) { - ensureOpen(); - return current.syncUpTo(location.translogLocation + location.size, globalCheckpoint); + try { + readLock.lock(); + try { + // if we have a new generation and the persisted global checkpoint is greater than or equal to the sync global checkpoint + // it's + // already synced + long persistedGlobalCheckpoint = current.getLastSyncedCheckpoint().globalCheckpoint; + if (location.generation == current.getGeneration() || persistedGlobalCheckpoint < globalCheckpoint) { + ensureOpen(); + return current.syncUpTo(location.translogLocation + location.size, globalCheckpoint); + } + } finally { + readLock.unlock(); } } catch (final Exception ex) { closeOnTragicEvent(ex); @@ -865,7 +919,6 @@ public boolean ensureSynced(Location location, long globalCheckpoint) throws IOE */ protected void closeOnTragicEvent(final Exception ex) { // we can not hold a read lock here because closing will attempt to obtain a write lock and that would result in self-deadlock - assert readLock.isHeldByCurrentThread() == false : Thread.currentThread().getName(); if (tragedy.get() != null) { try { close(); @@ -887,7 +940,8 @@ protected void closeOnTragicEvent(final Exception ex) { */ public TranslogStats stats() { // acquire lock to make the two numbers roughly consistent (no file change half way) - try (ReleasableLock lock = readLock.acquire()) { + readLock.lock(); + try { final long uncommittedGen = minGenerationForSeqNo(deletionPolicy.getLocalCheckpointOfSafeCommit() + 1, current, readers); return new TranslogStats( totalOperations(), @@ -896,6 +950,8 @@ public TranslogStats stats() { sizeInBytesByMinGen(uncommittedGen), earliestLastModifiedAge() ); + } finally { + readLock.unlock(); } } @@ -1602,8 +1658,11 @@ public static void writeOperationWithSize(BytesStreamOutput out, Translog.Operat * @return the minimum generation for the sequence number */ public TranslogGeneration getMinGenerationForSeqNo(final long seqNo) { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { return new TranslogGeneration(translogUUID, 
minGenerationForSeqNo(seqNo, current, readers)); + } finally { + readLock.unlock(); } } @@ -1627,7 +1686,8 @@ public void rollGeneration() throws IOException { if (current.totalOperations() == 0 && primaryTermSupplier.getAsLong() == current.getPrimaryTerm()) { return; } - try (Releasable ignored = writeLock.acquire()) { + writeLock.lock(); + try { ensureOpen(); try { final TranslogReader reader = current.closeIntoReader(); @@ -1642,6 +1702,8 @@ public void rollGeneration() throws IOException { closeOnTragicEvent(e); throw e; } + } finally { + writeLock.unlock(); } } @@ -1657,7 +1719,8 @@ void syncBeforeRollGeneration() throws IOException { */ public void trimUnreferencedReaders() throws IOException { // first check under read lock if any readers can be trimmed - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { if (closed.get()) { // we're shutdown potentially on some tragic event, don't delete anything return; @@ -1665,11 +1728,14 @@ public void trimUnreferencedReaders() throws IOException { if (getMinReferencedGen() == getMinFileGeneration()) { return; } + } finally { + readLock.unlock(); } // move most of the data to disk to reduce the time the write lock is held sync(); - try (ReleasableLock ignored = writeLock.acquire()) { + writeLock.lock(); + try { if (closed.get()) { // we're shutdown potentially on some tragic event, don't delete anything return; @@ -1701,11 +1767,12 @@ public void trimUnreferencedReaders() throws IOException { } catch (final Exception ex) { closeOnTragicEvent(ex); throw ex; + } finally { + writeLock.unlock(); } } private long getMinReferencedGen() { - assert readLock.isHeldByCurrentThread() || writeLock.isHeldByCurrentThread(); long minReferencedGen = Math.min( deletionPolicy.getMinTranslogGenRequiredByLocks(), minGenerationForSeqNo(deletionPolicy.getLocalCheckpointOfSafeCommit() + 1, current, readers) @@ -1736,13 +1803,16 @@ void deleteReaderFiles(TranslogReader reader) { } void closeFilesIfNoPendingRetentionLocks() throws IOException { - try (ReleasableLock ignored = writeLock.acquire()) { + writeLock.lock(); + try { if (closed.get() && deletionPolicy.pendingTranslogRefCount() == 0) { logger.trace("closing files. translog is closed and there are no pending retention locks"); ArrayList toClose = new ArrayList<>(readers); toClose.add(current); IOUtils.close(toClose); } + } finally { + writeLock.unlock(); } } @@ -1835,13 +1905,16 @@ public String getTranslogUUID() { * existing readers, this value is not necessary to be the max seq_no of all operations have been stored in this translog. 
*/ public long getMaxSeqNo() { - try (ReleasableLock ignored = readLock.acquire()) { + readLock.lock(); + try { ensureOpen(); final OptionalLong maxSeqNo = Stream.concat(readers.stream(), Stream.of(current)) .mapToLong(reader -> reader.getCheckpoint().maxSeqNo) .max(); assert maxSeqNo.isPresent() : "must have at least one translog generation"; return maxSeqNo.getAsLong(); + } finally { + readLock.unlock(); } } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 6aaeabdc175da..cd7e637d58bcc 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -43,7 +43,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.IOUtils; @@ -3468,12 +3467,15 @@ public void testRollGeneration() throws Exception { translog.add(new Translog.NoOp(seqNo++, primaryTerm.get(), "test")); totalOperations++; } - try (ReleasableLock ignored = translog.writeLock.acquire()) { + translog.writeLock.lock(); + try { if (randomBoolean()) { primaryTerm.incrementAndGet(); } translog.rollGeneration(); primaryTerms.add(primaryTerm.get()); + } finally { + translog.writeLock.unlock(); } assertThat(translog.currentFileGeneration(), equalTo(generation + i + 1)); assertThat(translog.getCurrent().getPrimaryTerm(), equalTo(primaryTerm.get())); From 8f8d19c4cd2f5715e6578ae0d7c42b2e990a1ad3 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 2 Jul 2024 20:11:18 +0200 Subject: [PATCH 127/216] Speedup BigByteArray index math (#110377) We only have a single hard-coded page size here. We can generate faster code by using a static page size, saving field loads etc.
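The core of the change is replacing per-call `pageSize()` lookups with constant shift/mask arithmetic. A standalone sketch follows; the page size constant here is hypothetical, while the real code derives the shift from `PAGE_SIZE_IN_BYTES`.

```
// Standalone illustration of constant page-index math; not the real BigByteArray.
final class PageMathSketch {
    private static final int PAGE_SIZE = 1 << 14; // hypothetical; must be a power of two
    private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(PAGE_SIZE);

    // Which page the global index falls on: a shift by a compile-time constant,
    // so the JIT needs no field load per call.
    static int pageIdx(long index) {
        return (int) (index >>> PAGE_SHIFT);
    }

    // Offset within that page: masking works because the page size is a power of two.
    static int idxInPage(long index) {
        return (int) (index & (PAGE_SIZE - 1));
    }

    public static void main(String[] args) {
        long index = 100_000L;
        System.out.println("page " + pageIdx(index) + ", offset " + idxInPage(index));
    }
}
```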
--- .../common/util/BigByteArray.java | 48 +++++++++++-------- 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java b/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java index 1e714f122d885..06ee78d99c532 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java @@ -41,15 +41,15 @@ public void writeTo(StreamOutput out) throws IOException { @Override public byte get(long index) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); return pages[pageIndex][indexInPage]; } @Override public void set(long index, byte value) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); page[indexInPage] = value; } @@ -61,9 +61,9 @@ public boolean get(long index, int len, BytesRef ref) { ref.length = 0; return false; } - int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); - if (indexInPage + len <= pageSize()) { + int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); + if (indexInPage + len <= BYTE_PAGE_SIZE) { ref.bytes = pages[pageIndex]; ref.offset = indexInPage; ref.length = len; @@ -71,11 +71,11 @@ public boolean get(long index, int len, BytesRef ref) { } else { ref.bytes = new byte[len]; ref.offset = 0; - ref.length = pageSize() - indexInPage; + ref.length = BYTE_PAGE_SIZE - indexInPage; System.arraycopy(pages[pageIndex], indexInPage, ref.bytes, 0, ref.length); do { ++pageIndex; - final int copyLength = Math.min(pageSize(), len - ref.length); + final int copyLength = Math.min(BYTE_PAGE_SIZE, len - ref.length); System.arraycopy(pages[pageIndex], 0, ref.bytes, ref.length, copyLength); ref.length += copyLength; } while (ref.length < len); @@ -86,18 +86,18 @@ public boolean get(long index, int len, BytesRef ref) { @Override public void set(long index, byte[] buf, int offset, int len) { assert index + len <= size(); - int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); - if (indexInPage + len <= pageSize()) { + int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); + if (indexInPage + len <= BYTE_PAGE_SIZE) { System.arraycopy(buf, offset, getPageForWriting(pageIndex), indexInPage, len); } else { - int copyLen = pageSize() - indexInPage; + int copyLen = BYTE_PAGE_SIZE - indexInPage; System.arraycopy(buf, offset, getPageForWriting(pageIndex), indexInPage, copyLen); do { ++pageIndex; offset += copyLen; len -= copyLen; - copyLen = Math.min(len, pageSize()); + copyLen = Math.min(len, BYTE_PAGE_SIZE); System.arraycopy(buf, offset, getPageForWriting(pageIndex), 0, copyLen); } while (len > copyLen); } @@ -108,16 +108,16 @@ public void fill(long fromIndex, long toIndex, byte value) { if (fromIndex > toIndex) { throw new IllegalArgumentException(); } - final int fromPage = pageIndex(fromIndex); - final int toPage = pageIndex(toIndex - 1); + final int fromPage = pageIdx(fromIndex); + final int toPage = pageIdx(toIndex - 1); if (fromPage == toPage) { - Arrays.fill(getPageForWriting(fromPage), indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value); + Arrays.fill(getPageForWriting(fromPage), idxInPage(fromIndex), idxInPage(toIndex - 1) + 
1, value); } else { - Arrays.fill(getPageForWriting(fromPage), indexInPage(fromIndex), pages[fromPage].length, value); + Arrays.fill(getPageForWriting(fromPage), idxInPage(fromIndex), pages[fromPage].length, value); for (int i = fromPage + 1; i < toPage; ++i) { Arrays.fill(getPageForWriting(i), value); } - Arrays.fill(getPageForWriting(toPage), 0, indexInPage(toIndex - 1) + 1, value); + Arrays.fill(getPageForWriting(toPage), 0, idxInPage(toIndex - 1) + 1, value); } } @@ -169,4 +169,14 @@ public static long estimateRamBytes(final long size) { return ESTIMATOR.ramBytesEstimated(size); } + private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(PAGE_SIZE_IN_BYTES); + + private static int pageIdx(long index) { + return (int) (index >>> PAGE_SHIFT); + } + + private static int idxInPage(long index) { + return (int) (index & PAGE_SIZE_IN_BYTES - 1); + } + } From 0680d7ef351fa806b4aa281a27031c6e29f682c6 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 2 Jul 2024 14:20:01 -0400 Subject: [PATCH 128/216] ESQL: Skip lookup csv tests on release builds (#110166) LOOKUP isn't supported in release builds yet and it'll fail with a helpful error message if you try it there. But some of the csv-spec tests didn't realize that. Lots did, but these particular ones didn't. Close #109170 --- .../xpack/esql/action/EsqlCapabilities.java | 6 ++- .../elasticsearch/xpack/esql/CsvTests.java | 9 ++++ .../xpack/esql/analysis/AnalyzerTests.java | 24 ++++++---- .../optimizer/LogicalPlanOptimizerTests.java | 24 +++++++--- .../optimizer/PhysicalPlanOptimizerTests.java | 47 +++++++++++++------ .../esql/parser/StatementParserTests.java | 18 ++++++- 6 files changed, 97 insertions(+), 31 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 71fccf4af0714..7f4f1c070f999 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -103,6 +103,8 @@ public enum Cap { */ DOUBLE_QUOTES_SOURCE_ENCLOSING; + private final boolean snapshotOnly; + Cap() { snapshotOnly = false; }; @@ -115,7 +117,9 @@ public String capabilityName() { return name().toLowerCase(Locale.ROOT); } - private final boolean snapshotOnly; + public boolean snapshotOnly() { + return snapshotOnly; + } } public static final Set CAPABILITIES = capabilities(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ab9133a8aa523..b67840aae3bcb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -240,6 +240,15 @@ public final void test() throws Throwable { testCase.requiredCapabilities, everyItem(in(EsqlCapabilities.CAPABILITIES)) ); + } else { + for (EsqlCapabilities.Cap c : EsqlCapabilities.Cap.values()) { + if (c.snapshotOnly()) { + assumeFalse( + c.capabilityName() + " is not supported in non-snapshot releases", + testCase.requiredCapabilities.contains(c.capabilityName()) + ); + } + } } doTest(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 
28d2046a0ea36..1f2ec0c236ecf 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1931,7 +1931,7 @@ public void testLookup() { | LOOKUP int_number_names ON int """; if (Build.current().isProductionRelease()) { - var e = expectThrows(VerificationException.class, () -> analyze(query)); + var e = expectThrows(ParsingException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); return; } @@ -1982,39 +1982,45 @@ public void testLookup() { } public void testLookupMissingField() { - var e = expectThrows(VerificationException.class, () -> analyze(""" + String query = """ FROM test | LOOKUP int_number_names ON garbage - """)); + """; if (Build.current().isProductionRelease()) { - assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + var e = expectThrows(ParsingException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 2:4: LOOKUP is in preview and only available in SNAPSHOT build")); return; } + var e = expectThrows(VerificationException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("Unknown column in lookup target [garbage]")); } public void testLookupMissingTable() { - var e = expectThrows(VerificationException.class, () -> analyze(""" + String query = """ FROM test | LOOKUP garbage ON a - """)); + """; if (Build.current().isProductionRelease()) { - assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + var e = expectThrows(ParsingException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 2:4: LOOKUP is in preview and only available in SNAPSHOT build")); return; } + var e = expectThrows(VerificationException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("Unknown table [garbage]")); } public void testLookupMatchTypeWrong() { - var e = expectThrows(VerificationException.class, () -> analyze(""" + String query = """ FROM test | RENAME last_name AS int | LOOKUP int_number_names ON int - """)); + """; if (Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); return; } + var e = expectThrows(VerificationException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("column type mismatch, table column was [integer] and original column was [keyword]")); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 05c40ce5bd85f..6a9e7a4000734 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -116,6 +116,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.PushDownAndCombineLimits; import org.elasticsearch.xpack.esql.optimizer.rules.SplitInWithFoldableValue; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.parser.ParsingException; import 
org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -158,6 +159,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.localSource; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.NULL; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; @@ -5009,11 +5011,16 @@ public void testIsNullDisjunction() throws Exception { * } */ public void testLookupSimple() { - var plan = optimizedPlan(""" + String query = """ FROM test | RENAME languages AS int - | LOOKUP int_number_names ON int - """); + | LOOKUP int_number_names ON int"""; + if (Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + var plan = optimizedPlan(query); var join = as(plan, Join.class); // Right is the lookup table @@ -5082,12 +5089,17 @@ public void testLookupSimple() { * } */ public void testLookupStats() { - var plan = optimizedPlan(""" + String query = """ FROM test | RENAME languages AS int | LOOKUP int_number_names ON int - | STATS MIN(emp_no) BY name - """); + | STATS MIN(emp_no) BY name"""; + if (Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + var plan = optimizedPlan(query); var limit = as(plan, Limit.class); assertThat(limit.limit().fold(), equalTo(1000)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index dfd66657c653e..210c4d1be6225 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.Build; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; @@ -135,6 +136,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; +import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyze; import static org.elasticsearch.xpack.esql.core.expression.Expressions.name; import static org.elasticsearch.xpack.esql.core.expression.Expressions.names; import static org.elasticsearch.xpack.esql.core.expression.Order.OrderDirection.ASC; @@ -4231,10 +4233,16 @@ public void testMaxQueryDepthPlusExpressionDepth() { } public void testLookupSimple() { - PhysicalPlan plan = physicalPlan(""" - FROM test | - RENAME languages AS int | - LOOKUP 
int_number_names ON int"""); + String query = """ + FROM test + | RENAME languages AS int + | LOOKUP int_number_names ON int"""; + if (Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + PhysicalPlan plan = physicalPlan(query); var join = as(plan, HashJoinExec.class); assertMap(join.matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertMap( @@ -4270,14 +4278,20 @@ public void testLookupSimple() { * } */ public void testLookupThenProject() { - PhysicalPlan plan = optimizedPlan(physicalPlan(""" + String query = """ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages AS int | LOOKUP int_number_names ON int | RENAME int AS languages, name AS lang_name - | KEEP emp_no, languages, lang_name""")); + | KEEP emp_no, languages, lang_name"""; + if (Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 5:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + PhysicalPlan plan = optimizedPlan(physicalPlan(query)); var outerProject = as(plan, ProjectExec.class); assertThat(outerProject.projections().toString(), containsString("AS lang_name")); @@ -4322,14 +4336,19 @@ public void testLookupThenProject() { * } */ public void testLookupThenTopN() { - var plan = physicalPlan(""" - FROM employees - | RENAME languages AS int - | LOOKUP int_number_names ON int - | RENAME name AS languages - | KEEP languages, emp_no - | SORT languages ASC, emp_no ASC - """); + String query = """ + FROM employees + | RENAME languages AS int + | LOOKUP int_number_names ON int + | RENAME name AS languages + | KEEP languages, emp_no + | SORT languages ASC, emp_no ASC"""; + if (Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> analyze(query)); + assertThat(e.getMessage(), containsString("line 3:4: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + var plan = physicalPlan(query); ProjectExec outerProject = as(plan, ProjectExec.class); TopNExec outerTopN = as(outerProject.child(), TopNExec.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index bd4ae4ee53c10..8dcc87608c85b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -1274,6 +1274,11 @@ public void testQuotedName() { } private void assertStringAsIndexPattern(String string, String statement) { + if (Build.current().isProductionRelease() && statement.contains("METRIC")) { + var e = expectThrows(IllegalArgumentException.class, () -> statement(statement)); + assertThat(e.getMessage(), containsString("METRICS command currently requires a snapshot build")); + return; + } LogicalPlan from = statement(statement); assertThat(from, instanceOf(EsqlUnresolvedRelation.class)); EsqlUnresolvedRelation table = (EsqlUnresolvedRelation) from; @@ -1281,6 +1286,11 @@ private void assertStringAsIndexPattern(String string, String statement) { } private void assertStringAsLookupIndexPattern(String string, String statement) { + if 
(Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> statement(statement)); + assertThat(e.getMessage(), containsString("line 1:14: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } var plan = statement(statement); var lookup = as(plan, Lookup.class); var tableName = as(lookup.tableName(), Literal.class); @@ -1343,7 +1353,13 @@ public void testInlineConvertWithNonexistentType() { } public void testLookup() { - var plan = statement("ROW a = 1 | LOOKUP t ON j"); + String query = "ROW a = 1 | LOOKUP t ON j"; + if (Build.current().isProductionRelease()) { + var e = expectThrows(ParsingException.class, () -> statement(query)); + assertThat(e.getMessage(), containsString("line 1:14: LOOKUP is in preview and only available in SNAPSHOT build")); + return; + } + var plan = statement(query); var lookup = as(plan, Lookup.class); var tableName = as(lookup.tableName(), Literal.class); assertThat(tableName.fold(), equalTo("t")); From 6fbc52d170e02cbae4ea859abf46a1551b45d6f5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 2 Jul 2024 14:22:50 -0400 Subject: [PATCH 129/216] ESQL docs: Push down needs index and doc_values (#110353) This adds a `NOTE` to each comparison saying that pushing the comparison to the search index requires that the field have an `index` and `doc_values`. This is unique compared to the rest of Elasticsearch which only requires an `index` and it's caused by our insistence that comparisons only return true for single-valued fields. We can in future accelerate comparisons without `doc_values`, but we just haven't written that code yet. --- docs/reference/esql/functions/binary.asciidoc | 69 +++++++++++++++++++ 1 file changed, 69 insertions(+) diff --git a/docs/reference/esql/functions/binary.asciidoc b/docs/reference/esql/functions/binary.asciidoc index 959bbe11c040e..72d466ae83d11 100644 --- a/docs/reference/esql/functions/binary.asciidoc +++ b/docs/reference/esql/functions/binary.asciidoc @@ -7,6 +7,12 @@ [.text-center] image::esql/functions/signature/equals.svg[Embedded,opts=inline] +Check if two fields are equal. If either field is <> then +the result is `null`. + +NOTE: This is pushed to the underlying search index if one side of the comparison is constant + and the other side is a field in the index that has both an <> and <>. + Supported types: include::types/equals.asciidoc[] @@ -15,6 +21,12 @@ include::types/equals.asciidoc[] [.text-center] image::esql/functions/signature/not_equals.svg[Embedded,opts=inline] +Check if two fields are unequal. If either field is <> then +the result is `null`. + +NOTE: This is pushed to the underlying search index if one side of the comparison is constant + and the other side is a field in the index that has both an <> and <>. + Supported types: include::types/not_equals.asciidoc[] @@ -23,55 +35,112 @@ include::types/not_equals.asciidoc[] [.text-center] image::esql/functions/signature/less_than.svg[Embedded,opts=inline] +Check if one field is less than another. If either field is <> +then the result is `null`. + +NOTE: This is pushed to the underlying search index if one side of the comparison is constant + and the other side is a field in the index that has both an <> and <>. + +Supported types: + include::types/less_than.asciidoc[] ==== Less than or equal to `<=` [.text-center] image::esql/functions/signature/less_than_or_equal.svg[Embedded,opts=inline] +Check if one field is less than or equal to another. If either field is <> +then the result is `null`. 
+ +NOTE: This is pushed to the underlying search index if one side of the comparison is constant + and the other side is a field in the index that has both an <> and <>. + +Supported types: + include::types/less_than_or_equal.asciidoc[] ==== Greater than `>` [.text-center] image::esql/functions/signature/greater_than.svg[Embedded,opts=inline] +Check if one field is greater than another. If either field is <> +then the result is `null`. + +NOTE: This is pushed to the underlying search index if one side of the comparison is constant + and the other side is a field in the index that has both an <> and <>. + +Supported types: + include::types/greater_than.asciidoc[] ==== Greater than or equal to `>=` [.text-center] image::esql/functions/signature/greater_than_or_equal.svg[Embedded,opts=inline] +Check if one field is greater than or equal to another. If either field is <> +then the result is `null`. + +NOTE: This is pushed to the underlying search index if one side of the comparison is constant + and the other side is a field in the index that has both an <> and <>. + +Supported types: + include::types/greater_than_or_equal.asciidoc[] ==== Add `+` [.text-center] image::esql/functions/signature/add.svg[Embedded,opts=inline] +Add two numbers together. If either field is <> +then the result is `null`. + +Supported types: + include::types/add.asciidoc[] ==== Subtract `-` [.text-center] image::esql/functions/signature/sub.svg[Embedded,opts=inline] +Subtract one number from another. If either field is <> +then the result is `null`. + +Supported types: + include::types/sub.asciidoc[] ==== Multiply `*` [.text-center] image::esql/functions/signature/mul.svg[Embedded,opts=inline] +Multiply two numbers together. If either field is <> +then the result is `null`. + +Supported types: + include::types/mul.asciidoc[] ==== Divide `/` [.text-center] image::esql/functions/signature/div.svg[Embedded,opts=inline] +Divide one number by another. If either field is <> +then the result is `null`. + NOTE: Division of two integer types will yield an integer result, rounding towards 0. If you need floating point division, <> one of the arguments to a `DOUBLE`. +Supported types: + include::types/div.asciidoc[] ==== Modulus `%` [.text-center] image::esql/functions/signature/mod.svg[Embedded,opts=inline] +Divide one number by another and return the remainder. If either field is <> +then the result is `null`. 
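The `null` propagation and integer-division behavior described above can be exercised with a small query (a sketch; the literal values are arbitrary):

```java
String query = """
    ROW a = 7, b = 2
    | EVAL intDiv = a / b, fpDiv = a / 2.0, remainder = a % b, missing = a + null
    """;
// intDiv == 3 (integer division rounds toward 0), fpDiv == 3.5,
// remainder == 1, and missing == null because one operand is null.
```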
+ +Supported types: + include::types/mod.asciidoc[] From a83ec0e5fb51ed1fb967343ecdaeb7e89736788a Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Jul 2024 21:07:18 +0100 Subject: [PATCH 130/216] AwaitsFix for #110398 --- .../inference/rank/textsimilarity/TextSimilarityRankTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index 7fbfe70dbcfe7..8cb9305edd057 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -151,6 +151,7 @@ public void testRerankInferenceFailure() { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/110398") public void testRerankInferenceResultMismatch() { ElasticsearchAssertions.assertFailures( // Execute search with text similarity reranking From 9282c1bc622f807cb583ca94fe879bbdb4decf19 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 2 Jul 2024 16:16:34 -0400 Subject: [PATCH 131/216] ESQL: Migrate all serialization of Expression (#110157) This moves all of `Expression` serialization from ESQL's home grown system into `NamedWriteable` which is how the rest of Elasticsearch works. We previously implemented `NamedWriteable` on all `Expression` subclasses and tested it. This just switches all production serialization over. It is wire compatible with the old custom way, so no one should notice anything other than less custom code. --- .../xpack/esql/core/expression/Alias.java | 5 +- .../esql/core/expression/Expression.java | 24 ++-- .../xpack/esql/core/expression/Order.java | 12 ++ .../function/UnresolvedFunction.java | 12 ++ .../function/scalar/BinaryScalarFunction.java | 9 +- .../function/scalar/UnaryScalarFunction.java | 5 +- .../esql/core/expression/predicate/Range.java | 12 ++ .../predicate/fulltext/FullTextPredicate.java | 5 +- .../predicate/logical/BinaryLogic.java | 9 +- .../predicate/operator/arithmetic/Neg.java | 13 ++ .../predicate/operator/comparison/Equals.java | 12 ++ .../operator/comparison/GreaterThan.java | 12 ++ .../comparison/GreaterThanOrEqual.java | 12 ++ .../predicate/operator/comparison/In.java | 12 ++ .../operator/comparison/LessThan.java | 12 ++ .../operator/comparison/LessThanOrEqual.java | 12 ++ .../operator/comparison/NotEquals.java | 12 ++ .../operator/comparison/NullEquals.java | 12 ++ .../core/expression/predicate/regex/Like.java | 13 ++ .../expression/predicate/regex/RLike.java | 13 ++ .../predicate/regex/WildcardLike.java | 13 ++ .../xpack/esql/core/util/PlanStreamInput.java | 8 -- .../esql/core/util/PlanStreamOutput.java | 27 ---- .../core/expression/NullabilityTests.java | 13 ++ .../function/FunctionRegistryTests.java | 12 ++ .../core/optimizer/OptimizerRulesTests.java | 12 ++ .../xpack/esql/expression/Order.java | 5 +- .../function/UnsupportedAttribute.java | 6 + .../function/aggregate/AggregateFunction.java | 7 +- .../function/aggregate/CountDistinct.java | 13 +- .../function/aggregate/FromPartial.java | 13 +- .../function/aggregate/Percentile.java | 7 +- .../expression/function/aggregate/Rate.java | 45 ++++--- .../function/aggregate/ToPartial.java | 13 +- .../expression/function/aggregate/Top.java | 13 +- 
.../expression/function/grouping/Bucket.java | 18 ++- .../function/scalar/EsqlScalarFunction.java | 86 +++++++------ .../function/scalar/UnaryScalarFunction.java | 102 +++++++-------- .../function/scalar/conditional/Case.java | 11 +- .../function/scalar/conditional/Greatest.java | 11 +- .../function/scalar/conditional/Least.java | 11 +- .../convert/AbstractConvertFunction.java | 2 +- .../function/scalar/date/DateDiff.java | 13 +- .../function/scalar/date/DateExtract.java | 9 +- .../function/scalar/date/DateFormat.java | 9 +- .../function/scalar/date/DateParse.java | 9 +- .../function/scalar/date/DateTrunc.java | 7 +- .../function/scalar/ip/CIDRMatch.java | 11 +- .../function/scalar/ip/IpPrefix.java | 15 +-- .../function/scalar/math/Atan2.java | 7 +- .../expression/function/scalar/math/Log.java | 9 +- .../expression/function/scalar/math/Pow.java | 7 +- .../function/scalar/math/Round.java | 9 +- .../AbstractMultivalueFunction.java | 5 +- .../function/scalar/multivalue/MvAppend.java | 2 +- .../function/scalar/multivalue/MvSlice.java | 15 +-- .../function/scalar/multivalue/MvSort.java | 11 +- .../function/scalar/multivalue/MvZip.java | 15 +-- .../function/scalar/nulls/Coalesce.java | 11 +- .../function/scalar/package-info.java | 22 ++-- .../scalar/spatial/BinarySpatialFunction.java | 10 +- .../function/scalar/string/Concat.java | 11 +- .../function/scalar/string/EndsWith.java | 7 +- .../function/scalar/string/Left.java | 7 +- .../function/scalar/string/Locate.java | 13 +- .../function/scalar/string/RLike.java | 5 +- .../function/scalar/string/Repeat.java | 7 +- .../function/scalar/string/Replace.java | 14 +-- .../function/scalar/string/Right.java | 7 +- .../function/scalar/string/Split.java | 7 +- .../function/scalar/string/StartsWith.java | 7 +- .../function/scalar/string/Substring.java | 13 +- .../function/scalar/string/ToLower.java | 5 +- .../function/scalar/string/ToUpper.java | 5 +- .../function/scalar/string/WildcardLike.java | 5 +- .../arithmetic/EsqlArithmeticOperation.java | 4 +- .../comparison/EsqlBinaryComparison.java | 9 +- .../predicate/operator/comparison/In.java | 11 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 119 ++++-------------- .../xpack/esql/io/stream/PlanStreamInput.java | 24 ---- .../esql/io/stream/PlanStreamOutput.java | 17 +-- .../xpack/esql/plan/logical/Aggregate.java | 7 +- .../xpack/esql/plan/logical/Lookup.java | 4 +- .../xpack/esql/plugin/EsqlPlugin.java | 7 ++ .../xpack/esql/type/MultiTypeEsField.java | 4 +- .../xpack/esql/SerializationTestUtils.java | 11 +- .../AbstractExpressionSerializationTests.java | 11 +- ...AbstractUnaryScalarSerializationTests.java | 7 -- .../xpack/esql/expression/AliasTests.java | 1 + .../expression/LiteralSerializationTests.java | 7 -- .../expression/OrderSerializationTests.java | 7 -- .../esql/expression/function/DeepCopy.java | 12 ++ .../function/EsqlFunctionRegistryTests.java | 11 ++ .../aggregate/AvgSerializationTests.java | 7 -- .../CountDistinctSerializationTests.java | 11 +- .../aggregate/CountSerializationTests.java | 7 -- .../aggregate/MaxSerializationTests.java | 7 -- ...anAbsoluteDeviationSerializationTests.java | 7 -- .../aggregate/MedianSerializationTests.java | 7 -- .../aggregate/MinSerializationTests.java | 7 -- .../PercentileSerializationTests.java | 7 -- .../aggregate/RateSerializationTests.java | 39 ++++++ .../SpatialCentroidSerializationTests.java | 7 -- .../aggregate/SumSerializationTests.java | 7 -- .../aggregate/TopSerializationTests.java | 7 -- .../aggregate/ValuesSerializationTests.java | 7 -- 
.../grouping/BucketSerializationTests.java | 8 -- .../scalar/AndSerializationTests.java | 7 -- .../scalar/NotSerializationTests.java | 7 -- .../function/scalar/OrSerializationTests.java | 7 -- .../conditional/CaseSerializationTests.java | 7 -- .../GreatestSerializationTests.java | 7 -- .../conditional/LeastSerializationTests.java | 7 -- .../date/DateDiffSerializationTests.java | 8 -- .../date/DateExtractSerializationTests.java | 8 -- .../date/DateFormatSerializationTests.java | 8 -- .../date/DateParseSerializationTests.java | 8 -- .../date/DateTruncSerializationTests.java | 8 -- .../scalar/date/NowSerializationTests.java | 8 -- .../ip/CIDRMatchSerializationTests.java | 7 -- .../scalar/ip/IpPrefixSerializationTests.java | 8 -- .../scalar/math/Atan2SerializationTests.java | 8 -- .../scalar/math/ESerializationTests.java | 8 -- .../scalar/math/LogSerializationTests.java | 8 -- .../scalar/math/PiSerializationTests.java | 8 -- .../scalar/math/PowSerializationTests.java | 8 -- .../scalar/math/RoundSerializationTests.java | 8 -- .../scalar/math/TauSerializationTests.java | 8 -- .../AbstractMvSerializationTests.java | 21 ---- .../MvAppendSerializationTests.java | 2 +- .../multivalue/MvAvgSerializationTests.java | 2 +- .../MvConcatSerializationTests.java | 2 +- .../multivalue/MvCountSerializationTests.java | 2 +- .../MvDedupeSerializationTests.java | 2 +- .../multivalue/MvFirstSerializationTests.java | 2 +- .../multivalue/MvLastSerializationTests.java | 2 +- .../multivalue/MvMaxSerializationTests.java | 2 +- .../MvMedianSerializationTests.java | 2 +- .../multivalue/MvMinSerializationTests.java | 2 +- .../multivalue/MvSliceSerializationTests.java | 2 +- .../multivalue/MvSortSerializationTests.java | 2 +- .../multivalue/MvSumSerializationTests.java | 2 +- .../multivalue/MvZipSerializationTests.java | 2 +- .../nulls/CoalesceSerializationTests.java | 7 -- .../nulls/IsNotNullSerializationTests.java | 8 -- .../nulls/IsNullSerializationTests.java | 8 -- ...ySpatialFunctionSerializationTestCase.java | 7 -- .../scalar/spatial/StXSerializationTests.java | 8 -- .../scalar/spatial/StYSerializationTests.java | 8 -- .../string/ConcatSerializationTests.java | 7 -- .../string/EndsWithSerializationTests.java | 8 -- .../scalar/string/LeftSerializationTests.java | 8 -- .../string/LocateSerializationTests.java | 8 -- .../string/RLikeSerializationTests.java | 8 -- .../string/RepeatSerializationTests.java | 8 -- .../string/ReplaceSerializationTests.java | 8 -- .../string/RightSerializationTests.java | 8 -- .../string/SplitSerializationTests.java | 8 -- .../string/StartsWithSerializationTests.java | 8 -- .../string/SubstringSerializationTests.java | 8 -- .../string/ToLowerSerializationTests.java | 8 -- .../string/ToUpperSerializationTests.java | 8 -- .../WildcardLikeSerializationTests.java | 8 -- .../AbstractArithmeticSerializationTests.java | 7 -- .../AbstractComparisonSerializationTests.java | 7 -- .../comparison/InSerializationTests.java | 7 -- .../InsensitiveEqualsSerializationTests.java | 7 -- .../AbstractFulltextSerializationTests.java | 7 -- .../esql/io/stream/PlanNamedTypesTests.java | 43 ------- .../esql/type/MultiTypeEsFieldTests.java | 5 +- 170 files changed, 713 insertions(+), 1130 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java delete mode 100644 
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMvSerializationTests.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java index d9f99b6d92318..01cc716a20547 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Alias.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -64,7 +63,7 @@ public Alias(StreamInput in) throws IOException { Source.readFrom((StreamInput & PlanStreamInput) in), in.readString(), in.readOptionalString(), - ((PlanStreamInput) in).readExpression(), + in.readNamedWriteable(Expression.class), NameId.readFrom((StreamInput & PlanStreamInput) in), in.readBoolean() ); @@ -75,7 +74,7 @@ public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); out.writeString(name()); out.writeOptionalString(qualifier()); - ((PlanStreamOutput) out).writeExpression(child()); + out.writeNamedWriteable(child()); id().writeTo(out); out.writeBoolean(synthetic()); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java index ee7e0aa81f81e..df8b6732ac0d4 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; -import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -30,6 +30,14 @@ * (which is a type of expression) with a single child, c. 
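 * The entries returned by getNamedWriteables() below are collected into a
 * NamedWriteableRegistry, after which callers read expressions back
 * generically via in.readNamedWriteable(Expression.class). Note that the
 * stream must be ESQL's plan stream wrapper: the individual readers rely
 * on it to decode Source and NameId values.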
*/ public abstract class Expression extends Node implements Resolvable, NamedWriteable { + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + for (NamedWriteableRegistry.Entry e : NamedExpression.getNamedWriteables()) { + entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (NamedExpression) e.reader.read(in))); + } + entries.add(Literal.ENTRY); + return entries; + } public static class TypeResolution { private final boolean failed; @@ -81,18 +89,6 @@ public Expression(Source source, List children) { super(source, children); } - @Override - public void writeTo(StreamOutput out) throws IOException { - // TODO remove this function entirely once all subclasses implement it - throw new UnsupportedOperationException("todo unsupported"); - } - - @Override - public String getWriteableName() { - // TODO remove this function entirely once all subclasses implement it - throw new UnsupportedOperationException("todo unsupported"); - } - // whether the expression can be evaluated statically (folded) or not public boolean foldable() { return false; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Order.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Order.java index a7377aab369b7..f73051fd9662a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Order.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Order.java @@ -6,10 +6,12 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import java.io.IOException; import java.util.List; import java.util.Objects; @@ -46,6 +48,16 @@ public Order(Source source, Expression child, OrderDirection direction, NullsPos this.nulls = nulls == null ? 
NullsPosition.ANY : nulls; } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, Order::new, child, direction, nulls); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunction.java index 012c39e26d904..49791e5820e7a 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunction.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunction.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.expression.function; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -16,6 +17,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; +import java.io.IOException; import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; @@ -38,6 +40,16 @@ public UnresolvedFunction(Source source, String name, FunctionResolutionStrategy this(source, name, resolutionStrategy, children, false, null); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + /** * Constructor used for specifying a more descriptive message (typically * 'did you mean') instead of the default one. 
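Two patterns recur throughout this change. Expression types that never cross the wire, such as `UnresolvedFunction` above, satisfy the `NamedWriteable` contract with stubs that throw `UnsupportedOperationException`; types that do serialize implement the full contract. A sketch of that contract for a hypothetical unary function (type-resolution and evaluator overrides are elided; real implementations such as `Rate` below follow this shape):

```java
public class MyFunc extends UnaryScalarFunction {
    public static final NamedWriteableRegistry.Entry ENTRY =
        new NamedWriteableRegistry.Entry(Expression.class, "MyFunc", MyFunc::new);

    public MyFunc(Source source, Expression field) {
        super(source, field);
    }

    private MyFunc(StreamInput in) throws IOException {
        super(in); // the base class reads the Source and the child expression
    }

    @Override
    public String getWriteableName() {
        return ENTRY.name; // must match the name the entry was registered under
    }

    // writeTo(StreamOutput) is inherited: UnaryScalarFunction writes the
    // Source followed by the child via writeNamedWriteable.
}
```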
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BinaryScalarFunction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BinaryScalarFunction.java index 4b462719a375b..d49fa07cd82c9 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BinaryScalarFunction.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/BinaryScalarFunction.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -30,16 +29,16 @@ protected BinaryScalarFunction(Source source, Expression left, Expression right) protected BinaryScalarFunction(StreamInput in) throws IOException { this( Source.readFrom((StreamInput & PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression() + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(left()); - ((PlanStreamOutput) out).writeExpression(right()); + out.writeNamedWriteable(left); + out.writeNamedWriteable(right); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java index e821ed33ffc0d..8704a42ed33e2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/scalar/UnaryScalarFunction.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -28,13 +27,13 @@ protected UnaryScalarFunction(Source source, Expression field) { } protected UnaryScalarFunction(StreamInput in) throws IOException { - this(Source.readFrom((StreamInput & PlanStreamInput) in), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((StreamInput & PlanStreamInput) in), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(field); + out.writeNamedWriteable(field); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java index ee48fd84b8add..e734f97573c1c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java @@ -6,6 +6,7 @@ */ package 
org.elasticsearch.xpack.esql.core.expression.predicate; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; @@ -14,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.DateUtils; +import java.io.IOException; import java.time.DateTimeException; import java.time.ZoneId; import java.util.List; @@ -39,6 +41,16 @@ public Range(Source src, Expression value, Expression lower, boolean inclLower, this.zoneId = zoneId; } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, Range::new, value, lower, includeLower, upper, includeUpper, zoneId); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java index e8ca84bc72988..29a567e83211d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -57,7 +56,7 @@ protected FullTextPredicate(StreamInput in) throws IOException { Source.readFrom((StreamInput & PlanStreamInput) in), in.readString(), in.readOptionalString(), - in.readCollectionAsList(input -> ((PlanStreamInput) in).readExpression()) + in.readNamedWriteableCollectionAsList(Expression.class) ); } @@ -92,7 +91,7 @@ public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); out.writeString(query); out.writeOptionalString(options); - out.writeCollection(children(), (o, v) -> ((PlanStreamOutput) o).writeExpression(v)); + out.writeNamedWriteableCollection(children()); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java index f40db61774fc5..210e8265dcfe9 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import java.io.IOException; @@ -30,8 +29,8 @@ protected BinaryLogic(Source source, Expression left, Expression right, BinaryLo protected BinaryLogic(StreamInput in, 
BinaryLogicOperation op) throws IOException { this( Source.readFrom((StreamInput & PlanStreamInput) in), - ((StreamInput & PlanStreamInput) in).readExpression(), - ((StreamInput & PlanStreamInput) in).readExpression(), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), op ); } @@ -39,8 +38,8 @@ protected BinaryLogic(StreamInput in, BinaryLogicOperation op) throws IOExceptio @Override public final void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((StreamOutput & PlanStreamOutput) out).writeExpression(left()); - ((StreamOutput & PlanStreamOutput) out).writeExpression(right()); + out.writeNamedWriteable(left()); + out.writeNamedWriteable(right()); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java index ddd4ce736879b..9a8a14f320cd6 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/Neg.java @@ -6,12 +6,15 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import java.io.IOException; + import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; @@ -24,6 +27,16 @@ public Neg(Source source, Expression field) { super(source, field); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, Neg::new, field()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java index 96e174d9afa5d..533ce4b76b595 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/Equals.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.time.ZoneId; public class Equals extends BinaryComparison implements Negatable { @@ -23,6 +25,16 @@ public Equals(Source source, Expression left, 
Expression right, ZoneId zoneId) { super(source, left, right, BinaryComparisonOperation.EQ, zoneId); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, Equals::new, left(), right(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java index cb165e42d0098..f4ffa1a12ae5b 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThan.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.time.ZoneId; public class GreaterThan extends BinaryComparison implements Negatable { @@ -19,6 +21,16 @@ public GreaterThan(Source source, Expression left, Expression right, ZoneId zone super(source, left, right, BinaryComparisonOperation.GT, zoneId); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, GreaterThan::new, left(), right(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java index 2f0b3feeaf7d8..28aa4124f0987 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/GreaterThanOrEqual.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.time.ZoneId; public class GreaterThanOrEqual extends BinaryComparison implements Negatable { @@ -19,6 +21,16 @@ public GreaterThanOrEqual(Source source, Expression left, Expression right, Zone super(source, left, right, BinaryComparisonOperation.GTE, zoneId); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String 
getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, GreaterThanOrEqual::new, left(), right(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java index abbfaabd09ba2..bd645064289a5 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/In.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Foldables; @@ -18,6 +19,7 @@ import org.elasticsearch.xpack.esql.core.type.DataTypeConverter; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import java.io.IOException; import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; @@ -46,6 +48,16 @@ public In(Source source, Expression value, List list, ZoneId zoneId) this.zoneId = zoneId; } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, In::new, value(), list(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java index 5f59e4abf8abf..150db16521480 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThan.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.time.ZoneId; public class LessThan extends BinaryComparison implements Negatable { @@ -19,6 +21,16 @@ public LessThan(Source source, Expression left, Expression right, ZoneId zoneId) super(source, left, right, BinaryComparisonOperation.LT, zoneId); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, LessThan::new, left(), right(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java index 0b7f4c732c87d..a0e5abd4317b3 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/LessThanOrEqual.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.time.ZoneId; public class LessThanOrEqual extends BinaryComparison implements Negatable { @@ -19,6 +21,16 @@ public LessThanOrEqual(Source source, Expression left, Expression right, ZoneId super(source, left, right, BinaryComparisonOperation.LTE, zoneId); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, LessThanOrEqual::new, left(), right(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java index a4cfb8ca2e9b9..6d52195ec9452 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NotEquals.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.time.ZoneId; public class NotEquals extends BinaryComparison implements Negatable { @@ -19,6 +21,16 @@ public NotEquals(Source source, Expression left, Expression right, ZoneId zoneId super(source, left, right, BinaryComparisonOperation.NEQ, zoneId); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, NotEquals::new, left(), right(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java index a647cdeb5ca20..bb2196a5ae3b9 100644 --- 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/comparison/NullEquals.java @@ -6,11 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; import java.time.ZoneId; /** @@ -22,6 +24,16 @@ public NullEquals(Source source, Expression left, Expression right, ZoneId zoneI super(source, left, right, BinaryComparisonOperation.NULLEQ, zoneId); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, NullEquals::new, left(), right(), zoneId()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java index 84ed88da0fe42..6d8ce8cbdf47f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/Like.java @@ -6,16 +6,29 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; + public class Like extends RegexMatch { public Like(Source source, Expression left, LikePattern pattern) { this(source, left, pattern, false); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + public Like(Source source, Expression left, LikePattern pattern, boolean caseInsensitive) { super(source, left, pattern, caseInsensitive); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLike.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLike.java index 8020491c50212..5f095a654fc89 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLike.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLike.java @@ -6,10 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; + public class RLike extends RegexMatch { public RLike(Source source, Expression value, 
RLikePattern pattern) { @@ -20,6 +23,16 @@ public RLike(Source source, Expression field, RLikePattern rLikePattern, boolean super(source, field, rLikePattern, caseInsensitive); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, RLike::new, field(), pattern(), caseInsensitive()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardLike.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardLike.java index 8834c1a0211b4..bf54744667217 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardLike.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardLike.java @@ -6,10 +6,13 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import java.io.IOException; + public class WildcardLike extends RegexMatch { public WildcardLike(Source source, Expression left, WildcardPattern pattern) { @@ -20,6 +23,16 @@ public WildcardLike(Source source, Expression left, WildcardPattern pattern, boo super(source, left, pattern, caseInsensitive); } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, WildcardLike::new, field(), pattern(), caseInsensitive()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java index 485084bac60b3..df8fac06dd478 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamInput.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.esql.core.util; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -35,10 +33,4 @@ public interface PlanStreamInput { * the same result. */ NameId mapNameId(long id) throws IOException; - - /** - * Read an {@link Expression} from the stream. This will soon be replaced with - * {@link StreamInput#readNamedWriteable}. - */ - Expression readExpression() throws IOException; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java deleted file mode 100644 index 6a3d8fb77316c..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/PlanStreamOutput.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.util; - -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Expression; - -import java.io.IOException; - -/** - * Interface for streams that can serialize plan components. This exists so - * ESQL proper can expose streaming capability to ESQL-core. If the world is kind - * and just we'll remove this when we flatten everything from ESQL-core into - * ESQL proper. - */ -public interface PlanStreamOutput { - /** - * Write an {@link Expression} to the stream. This will soon be replaced with - * {@link StreamOutput#writeNamedWriteable}. - */ - void writeExpression(Expression expression) throws IOException; -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java index fbeac1748ac81..9bcbaef0060ff 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/NullabilityTests.java @@ -6,11 +6,14 @@ */ package org.elasticsearch.xpack.esql.core.expression; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import java.io.IOException; + import static java.util.Arrays.asList; import static org.elasticsearch.xpack.esql.core.expression.Nullability.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Nullability.TRUE; @@ -28,6 +31,16 @@ public Nullable(Source source, Nullability nullability) { this.nullability = nullability; } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override public Nullability nullable() { return nullability; diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java index 8691b5e9153fb..8d39cc74779f2 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.expression.function; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.ParsingException; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; @@ -16,6 +17,7 @@ import org.elasticsearch.xpack.esql.core.tree.SourceTests; import org.elasticsearch.xpack.esql.core.type.DataType; +import java.io.IOException; import java.util.Arrays; import java.util.List; @@ -173,6 +175,16 @@ public DummyFunction(Source source) { super(source, emptyList()); } + @Override + 
public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this); diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java index 12dbb23a86c59..789e9a22d39d1 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.optimizer; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.TestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -17,6 +18,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import java.io.IOException; import java.util.Collections; import java.util.List; @@ -38,6 +40,16 @@ public DummyBooleanExpression(Source source, int id) { this.id = id; } + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public String getWriteableName() { + throw new UnsupportedOperationException(); + } + @Override protected NodeInfo info() { return NodeInfo.create(this, DummyBooleanExpression::new, id); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java index 11a98d3a11504..dd51c5ba6473a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/Order.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -30,7 +29,7 @@ public Order(Source source, Expression child, OrderDirection direction, NullsPos public Order(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), + in.readNamedWriteable(Expression.class), in.readEnum(org.elasticsearch.xpack.esql.core.expression.Order.OrderDirection.class), in.readEnum(org.elasticsearch.xpack.esql.core.expression.Order.NullsPosition.class) ); @@ -39,7 +38,7 @@ public Order(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeExpression(child()); + out.writeNamedWriteable(child()); out.writeEnum(direction()); out.writeEnum(nullsPosition()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java index 79dcc6a3d3920..22c4aa9c6bf07 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java 
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnsupportedAttribute.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; @@ -42,6 +43,11 @@ public final class UnsupportedAttribute extends FieldAttribute implements Unreso ENTRY.name, UnsupportedAttribute::new ); + public static final NamedWriteableRegistry.Entry EXPRESSION_ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + ENTRY.name, + UnsupportedAttribute::new + ); private final String message; private final boolean hasCustomMessage; // TODO remove me and just use message != null? diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index da44b15bdb69d..38d86083df74b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -39,11 +38,11 @@ public static List getNamedWriteables() { MedianAbsoluteDeviation.ENTRY, Min.ENTRY, Percentile.ENTRY, + Rate.ENTRY, SpatialCentroid.ENTRY, Sum.ENTRY, Top.ENTRY, Values.ENTRY, - Rate.ENTRY, // internal functions ToPartial.ENTRY, FromPartial.ENTRY @@ -64,13 +63,13 @@ protected AggregateFunction(Source source, Expression field, List ((PlanStreamInput) i).readExpression()) + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeExpression(field()); - ((PlanStreamOutput) out).writeOptionalExpression(precision); + out.writeNamedWriteable(field()); + out.writeOptionalNamedWriteable(precision); } @Override @@ -160,4 +159,8 @@ public Expression surrogate() { ? 
new ToLong(s, new Coalesce(s, new MvCount(s, new MvDedupe(s, field)), List.of(new Literal(s, 0, DataType.INTEGER)))) : null; } + + Expression precision() { + return precision; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java index d7d6237c564c3..e16b872f654e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java @@ -24,8 +24,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; @@ -50,14 +48,19 @@ public FromPartial(Source source, Expression field, Expression function) { } private FromPartial(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + super(in); + this.function = in.readNamedWriteable(Expression.class); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - PlanStreamOutput planOut = (PlanStreamOutput) out; - planOut.writeExpression(function); + out.writeNamedWriteable(function); + } + + @Override + public String getWriteableName() { + return ENTRY.name; } public Expression function() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index e2156f4d3b97d..b65e78b431159 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -56,14 +55,14 @@ public Percentile( } private Percentile(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - ((PlanStreamOutput) out).writeExpression(children().get(1)); + out.writeNamedWriteable(children().get(0)); + out.writeNamedWriteable(children().get(1)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index 6da6d42f2a8f1..227bea0789366 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.planner.ToAggregator; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -38,7 +37,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; public class Rate extends AggregateFunction implements OptionalArgument, ToAggregator { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Rate", Rate::readFrom); + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Rate", Rate::new); private static final TimeValue DEFAULT_UNIT = TimeValue.timeValueSeconds(1); private final Expression timestamp; @@ -49,7 +48,6 @@ public class Rate extends AggregateFunction implements OptionalArgument, ToAggre description = "compute the rate of a counter field. Available in METRICS command only", isAggregation = true ) - public Rate( Source source, @Param(name = "field", type = { "counter_long|counter_integer|counter_double" }, description = "counter field") Expression field, @@ -61,25 +59,30 @@ public Rate( this.unit = unit; } - public static Rate withUnresolvedTimestamp(Source source, Expression field, Expression unit) { - return new Rate(source, field, new UnresolvedAttribute(source, "@timestamp"), unit); + public Rate(StreamInput in) throws IOException { + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) + ); } - private static Rate readFrom(StreamInput in) throws IOException { - PlanStreamInput planIn = (PlanStreamInput) in; - Source source = Source.readFrom(planIn); - Expression field = planIn.readExpression(); - Expression timestamp = planIn.readExpression(); - Expression unit = planIn.readOptionalNamed(Expression.class); - return new Rate(source, field, timestamp, unit); + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(field()); + out.writeNamedWriteable(timestamp); + out.writeOptionalNamedWriteable(unit); } @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - PlanStreamOutput planOut = (PlanStreamOutput) out; - planOut.writeExpression(timestamp); - planOut.writeOptionalExpression(unit); + public String getWriteableName() { + return ENTRY.name; + } + + public static Rate withUnresolvedTimestamp(Source source, Expression field, Expression unit) { + return new Rate(source, field, new UnresolvedAttribute(source, "@timestamp"), unit); } @Override @@ -175,4 +178,12 @@ public String toString() { return "rate(" + field() + ")"; } } + + Expression timestamp() { + return timestamp; + } + + Expression unit() { + return unit; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java index 805985b5302cf..e03574f9cb094 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -24,8 +24,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; @@ -77,14 +75,19 @@ private ToPartial(Source source, Expression field, Expression function) { } private ToPartial(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + super(in); + this.function = in.readNamedWriteable(Expression.class); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - PlanStreamOutput planOut = (PlanStreamOutput) out; - planOut.writeExpression(function); + out.writeNamedWriteable(function); + } + + @Override + public String getWriteableName() { + return ENTRY.name; } public Expression function() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java index da7a14e5b28eb..c966ef7afb7c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Top.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; @@ -72,9 +71,9 @@ public Top( private Top(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression() + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class) ); } @@ -83,9 +82,9 @@ public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); List fields = children(); assert fields.size() == 3; - ((PlanStreamOutput) out).writeExpression(fields.get(0)); - ((PlanStreamOutput) out).writeExpression(fields.get(1)); - ((PlanStreamOutput) out).writeExpression(fields.get(2)); + out.writeNamedWriteable(fields.get(0)); + out.writeNamedWriteable(fields.get(1)); + out.writeNamedWriteable(fields.get(2)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index dab2019a50682..7e6f3999bf11e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -33,7 +33,6 @@ import 
org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.io.IOException; @@ -204,21 +203,20 @@ public Bucket( private Bucket(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readOptionalNamed(Expression.class), - ((PlanStreamInput) in).readOptionalNamed(Expression.class) + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(field); - ((PlanStreamOutput) out).writeExpression(buckets); - ((PlanStreamOutput) out).writeOptionalExpression(from); - ((PlanStreamOutput) out).writeOptionalExpression(to); - + out.writeNamedWriteable(field); + out.writeNamedWriteable(buckets); + out.writeOptionalNamedWriteable(from); + out.writeOptionalNamedWriteable(to); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index f8adf4e5d9e16..563847473c992 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -34,6 +35,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; @@ -46,9 +48,12 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; +import java.util.ArrayList; import java.util.List; /** @@ -63,44 +68,49 @@ */ public abstract class EsqlScalarFunction extends ScalarFunction implements EvaluatorMapper { public static List getNamedWriteables() { - return List.of( - And.ENTRY, - Atan2.ENTRY, - Bucket.ENTRY, - Case.ENTRY, - CIDRMatch.ENTRY, - Coalesce.ENTRY, - Concat.ENTRY, - E.ENTRY, - EndsWith.ENTRY, - Greatest.ENTRY, - In.ENTRY, - InsensitiveEquals.ENTRY, - DateExtract.ENTRY, - DateDiff.ENTRY, - DateFormat.ENTRY, - DateParse.ENTRY, - DateTrunc.ENTRY, - IpPrefix.ENTRY, - Least.ENTRY, - Left.ENTRY, - Locate.ENTRY, - Log.ENTRY, - Now.ENTRY, - Or.ENTRY, - Pi.ENTRY, - Pow.ENTRY, - Right.ENTRY, - Repeat.ENTRY, - Replace.ENTRY, - Round.ENTRY, - Split.ENTRY, - Substring.ENTRY, - StartsWith.ENTRY, - Tau.ENTRY, - ToLower.ENTRY, - ToUpper.ENTRY - ); + List entries = new ArrayList<>(); + entries.add(And.ENTRY); + entries.add(Atan2.ENTRY); + entries.add(Bucket.ENTRY); + entries.add(Case.ENTRY); + entries.add(CIDRMatch.ENTRY); + entries.add(Coalesce.ENTRY); + entries.add(Concat.ENTRY); + entries.add(E.ENTRY); + entries.add(EndsWith.ENTRY); + entries.add(Greatest.ENTRY); + entries.add(In.ENTRY); + entries.add(InsensitiveEquals.ENTRY); + entries.add(DateExtract.ENTRY); + entries.add(DateDiff.ENTRY); + entries.add(DateFormat.ENTRY); + entries.add(DateParse.ENTRY); + entries.add(DateTrunc.ENTRY); + entries.add(IpPrefix.ENTRY); + entries.add(Least.ENTRY); + entries.add(Left.ENTRY); + entries.add(Locate.ENTRY); + entries.add(Log.ENTRY); + entries.add(Now.ENTRY); + entries.add(Or.ENTRY); + entries.add(Pi.ENTRY); + entries.add(Pow.ENTRY); + entries.add(Right.ENTRY); + entries.add(Repeat.ENTRY); + entries.add(Replace.ENTRY); + entries.add(Round.ENTRY); + entries.add(Split.ENTRY); + entries.add(Substring.ENTRY); + entries.add(StartsWith.ENTRY); + entries.add(Tau.ENTRY); + entries.add(ToLower.ENTRY); + entries.add(ToUpper.ENTRY); + entries.addAll(BinarySpatialFunction.getNamedWriteables()); + entries.addAll(EsqlArithmeticOperation.getNamedWriteables()); + entries.addAll(EsqlBinaryComparison.getNamedWriteables()); + entries.addAll(FullTextPredicate.getNamedWriteables()); + entries.addAll(UnaryScalarFunction.getNamedWriteables()); + return entries; } protected EsqlScalarFunction(Source source) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java index 6c43e74593335..0e9dbf3057c1b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; @@ -60,9 +61,9 @@ import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -70,53 +71,54 @@ public abstract class UnaryScalarFunction extends EsqlScalarFunction { public static List getNamedWriteables() { - return List.of( - Abs.ENTRY, - Acos.ENTRY, - Asin.ENTRY, - Atan.ENTRY, - Cbrt.ENTRY, - Ceil.ENTRY, - Cos.ENTRY, - Cosh.ENTRY, - Floor.ENTRY, - FromBase64.ENTRY, - IsNotNull.ENTRY, - IsNull.ENTRY, - Length.ENTRY, - Log10.ENTRY, - LTrim.ENTRY, - Neg.ENTRY, - Not.ENTRY, - RLike.ENTRY, - RTrim.ENTRY, - Signum.ENTRY, - Sin.ENTRY, - Sinh.ENTRY, - Sqrt.ENTRY, - StX.ENTRY, - StY.ENTRY, - Tan.ENTRY, - Tanh.ENTRY, - ToBase64.ENTRY, - ToBoolean.ENTRY, - ToCartesianPoint.ENTRY, - ToDatetime.ENTRY, - ToDegrees.ENTRY, - ToDouble.ENTRY, - ToGeoShape.ENTRY, - ToCartesianShape.ENTRY, - ToGeoPoint.ENTRY, - ToIP.ENTRY, - ToInteger.ENTRY, - ToLong.ENTRY, - ToRadians.ENTRY, - ToString.ENTRY, - ToUnsignedLong.ENTRY, - ToVersion.ENTRY, - Trim.ENTRY, - WildcardLike.ENTRY - ); + List entries = new ArrayList<>(); + entries.add(Abs.ENTRY); + entries.add(Acos.ENTRY); + entries.add(Asin.ENTRY); + entries.add(Atan.ENTRY); + entries.add(Cbrt.ENTRY); + entries.add(Ceil.ENTRY); + entries.add(Cos.ENTRY); + entries.add(Cosh.ENTRY); + entries.add(Floor.ENTRY); + entries.add(FromBase64.ENTRY); + entries.add(IsNotNull.ENTRY); + entries.add(IsNull.ENTRY); + entries.add(Length.ENTRY); + entries.add(Log10.ENTRY); + entries.add(LTrim.ENTRY); + entries.add(Neg.ENTRY); + entries.add(Not.ENTRY); + entries.add(RLike.ENTRY); + entries.add(RTrim.ENTRY); + entries.add(Signum.ENTRY); + entries.add(Sin.ENTRY); + entries.add(Sinh.ENTRY); + entries.add(Sqrt.ENTRY); + entries.add(StX.ENTRY); + entries.add(StY.ENTRY); + entries.add(Tan.ENTRY); + entries.add(Tanh.ENTRY); + entries.add(ToBase64.ENTRY); + entries.add(ToBoolean.ENTRY); + entries.add(ToCartesianPoint.ENTRY); + entries.add(ToDatetime.ENTRY); + entries.add(ToDegrees.ENTRY); + entries.add(ToDouble.ENTRY); + entries.add(ToGeoShape.ENTRY); + entries.add(ToCartesianShape.ENTRY); + entries.add(ToGeoPoint.ENTRY); + entries.add(ToIP.ENTRY); + entries.add(ToInteger.ENTRY); + entries.add(ToLong.ENTRY); + entries.add(ToRadians.ENTRY); + entries.add(ToString.ENTRY); + entries.add(ToUnsignedLong.ENTRY); + entries.add(ToVersion.ENTRY); + entries.add(Trim.ENTRY); + entries.add(WildcardLike.ENTRY); + entries.addAll(AbstractMultivalueFunction.getNamedWriteables()); + return entries; } protected final Expression field; @@ -127,13 +129,13 @@ public UnaryScalarFunction(Source source, Expression field) { } protected UnaryScalarFunction(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(field); + out.writeNamedWriteable(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 
50d0e5484756e..3239afabf6a24 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.io.IOException; @@ -43,8 +42,6 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; public final class Case extends EsqlScalarFunction { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Case", Case::new); @@ -123,16 +120,16 @@ public Case( private Case(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)) + in.readNamedWriteable(Expression.class), + in.readNamedWriteableCollectionAsList(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeCollection(children().subList(1, children().size()), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteable(children().get(0)); + out.writeNamedWriteableCollection(children().subList(1, children().size())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 580e2f9900208..d6fe76b119cb5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -35,8 +34,6 @@ import java.util.stream.Stream; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; /** * Returns the maximum value of multiple columns. 
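The Case hunk above and the Greatest, Least, CIDRMatch, and Coalesce hunks nearby all converge on one convention for variadic functions: write the first, required child directly and the remaining children as a named-writeable collection, instead of hand-rolling readers and writers through readerFromPlanReader/writerFromPlanWriter. A minimal sketch of that convention, using a hypothetical MyFunc that is not part of this patch and eliding everything except the serialization members:

    public class MyFunc extends EsqlScalarFunction {
        public static final NamedWriteableRegistry.Entry ENTRY =
            new NamedWriteableRegistry.Entry(Expression.class, "MyFunc", MyFunc::new);

        private MyFunc(StreamInput in) throws IOException {
            this(
                Source.readFrom((PlanStreamInput) in),
                in.readNamedWriteable(Expression.class),                 // required first argument
                in.readNamedWriteableCollectionAsList(Expression.class)  // zero or more trailing arguments
            );
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            source().writeTo(out);
            out.writeNamedWriteable(children().get(0));
            out.writeNamedWriteableCollection(children().subList(1, children().size()));
        }

        @Override
        public String getWriteableName() {
            return ENTRY.name;
        }

        // the public constructor, type resolution, and evaluator mapping are elided
    }

The collection write puts the element count on the wire, so the read side recovers the arity from the stream itself; that is what makes the deleted lambda helpers unnecessary.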
@@ -74,16 +71,16 @@ public Greatest( private Greatest(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)) + in.readNamedWriteable(Expression.class), + in.readNamedWriteableCollectionAsList(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeCollection(children().subList(1, children().size()), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteable(children().get(0)); + out.writeNamedWriteableCollection(children().subList(1, children().size())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 2255fed9d4947..221a7d466da71 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -35,8 +34,6 @@ import java.util.stream.Stream; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; /** * Returns the minimum value of multiple columns. 
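For functions with an optional trailing argument (the DateFormat, DateParse, Log, and Round hunks below), the same migration replaces lambda-based writeOptionalWriteable calls with writeOptionalNamedWriteable, which writes a presence flag followed by the named value. A sketch of the resulting read/write pair, with MyParse as a hypothetical stand-in:

    private MyParse(StreamInput in) throws IOException {
        this(
            Source.readFrom((PlanStreamInput) in),
            in.readNamedWriteable(Expression.class),          // required first argument
            in.readOptionalNamedWriteable(Expression.class)   // null when the presence flag is false
        );
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        source().writeTo(out);
        out.writeNamedWriteable(children().get(0));
        // children() has one or two elements; a missing optional argument is written as null
        out.writeOptionalNamedWriteable(children().size() == 2 ? children().get(1) : null);
    }

Note how the DateFormat hunk below also normalizes the old inverted condition (children().size() == 1 ? null : ...) into the same children().size() == 2 form used everywhere else.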
@@ -72,16 +69,16 @@ public Least( private Least(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)) + in.readNamedWriteable(Expression.class), + in.readNamedWriteableCollectionAsList(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeCollection(children().subList(1, children().size()), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteable(children().get(0)); + out.writeNamedWriteableCollection(children().subList(1, children().size())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 96601905d40c9..0fed02f89fd92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -57,7 +57,7 @@ protected AbstractConvertFunction(Source source, Expression field) { } protected AbstractConvertFunction(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java index 2a224598253f9..582785d023945 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiff.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.time.Instant; @@ -176,18 +175,18 @@ public DateDiff( private DateDiff(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression() + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeExpression(unit); - ((PlanStreamOutput) out).writeExpression(startTimestamp); - ((PlanStreamOutput) out).writeExpression(endTimestamp); + out.writeNamedWriteable(unit); + out.writeNamedWriteable(startTimestamp); + out.writeNamedWriteable(endTimestamp); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index f3448a2b7c5ff..5a57e98be38b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.io.IOException; @@ -83,8 +82,8 @@ public DateExtract( private DateExtract(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), ((PlanStreamInput) in).configuration() ); } @@ -92,8 +91,8 @@ private DateExtract(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(datePart()); - ((PlanStreamOutput) out).writeExpression(field()); + out.writeNamedWriteable(datePart()); + out.writeNamedWriteable(field()); } Expression datePart() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 9a789c2bb6fb2..8662116fe5b67 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -73,8 +72,8 @@ Date format (optional). If no format is specified, the `yyyy-MM-dd'T'HH:mm:ss.S private DateFormat(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readOptionalWriteable(i -> ((PlanStreamInput) i).readExpression()), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class), ((PlanStreamInput) in).configuration() ); } @@ -82,8 +81,8 @@ private DateFormat(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeOptionalWriteable(children().size() == 1 ? null : o -> ((PlanStreamOutput) o).writeExpression(children().get(1))); + out.writeNamedWriteable(children().get(0)); + out.writeOptionalNamedWriteable(children().size() == 2 ? 
children().get(1) : null); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 12ffe092287ed..10551cae9eba2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.io.IOException; @@ -78,16 +77,16 @@ public DateParse( private DateParse(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readOptionalWriteable(i -> ((PlanStreamInput) i).readExpression()) + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeOptionalWriteable(children().size() == 2 ? o -> ((PlanStreamOutput) out).writeExpression(children().get(1)) : null); + out.writeNamedWriteable(children().get(0)); + out.writeOptionalNamedWriteable(children().size() == 2 ? children().get(1) : null); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 995e525dda9ec..c39905f261d88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.io.IOException; @@ -82,14 +81,14 @@ public DateTrunc( } private DateTrunc(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(interval); - ((PlanStreamOutput) out).writeExpression(timestampField); + out.writeNamedWriteable(interval); + out.writeNamedWriteable(timestampField); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index e24ee80fe7972..c141beeefb1ea 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -38,8 +37,6 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.fromIndex; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isIPAndExact; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; /** * This function takes a first parameter of type IP, followed by one or more parameters evaluated to a CIDR specification: @@ -84,8 +81,8 @@ public CIDRMatch( private CIDRMatch(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)) + in.readNamedWriteable(Expression.class), + in.readNamedWriteableCollectionAsList(Expression.class) ); } @@ -93,8 +90,8 @@ private CIDRMatch(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); assert children().size() > 1; - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeCollection(children().subList(1, children().size()), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteable(children().get(0)); + out.writeNamedWriteableCollection(children().subList(1, children().size())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java index 696ba1c09d08a..ba51e5a9c4c0d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -84,20 +83,18 @@ public IpPrefix( private IpPrefix(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression() + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - List fields = children(); - assert fields.size() == 3; - ((PlanStreamOutput) out).writeExpression(fields.get(0)); - ((PlanStreamOutput) 
out).writeExpression(fields.get(1)); - ((PlanStreamOutput) out).writeExpression(fields.get(2)); + out.writeNamedWriteable(ipField); + out.writeNamedWriteable(prefixLengthV4Field); + out.writeNamedWriteable(prefixLengthV6Field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index 5370a31023522..f940cb6d68554 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -23,7 +23,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -65,14 +64,14 @@ public Atan2( } private Atan2(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(y()); - ((PlanStreamOutput) out).writeExpression(x()); + out.writeNamedWriteable(y); + out.writeNamedWriteable(x); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java index d17f24cade17b..348bbaf1fe85c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -70,8 +69,8 @@ public Log( private Log(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readOptionalNamed(Expression.class) + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @@ -79,8 +78,8 @@ private Log(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); assert children().size() == 1 || children().size() == 2; - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeOptionalWriteable(children().size() == 2 ? o -> ((PlanStreamOutput) o).writeExpression(children().get(1)) : null); + out.writeNamedWriteable(children().get(0)); + out.writeOptionalNamedWriteable(children().size() == 2 ? 
children().get(1) : null); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 3865e3b946ab4..46d80635823ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -66,14 +65,14 @@ public Pow( } private Pow(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(base); - ((PlanStreamOutput) out).writeExpression(exponent); + out.writeNamedWriteable(base); + out.writeNamedWriteable(exponent); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index bd7e58a3b4fbb..07953a478e2f0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.math.BigInteger; @@ -77,16 +76,16 @@ public Round( private Round(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readOptionalNamed(Expression.class) + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(field); - ((PlanStreamOutput) out).writeOptionalExpression(decimals); + out.writeNamedWriteable(field); + out.writeOptionalNamedWriteable(decimals); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 9b7e0b729cde9..cffb208940aa5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ 
-18,7 +18,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -57,13 +56,13 @@ protected AbstractMultivalueFunction(Source source, Expression field) { } protected AbstractMultivalueFunction(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class)); } @Override public final void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeExpression(field); + out.writeNamedWriteable(field); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java index 99844d40e0565..dc4b78d980c28 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java @@ -111,7 +111,7 @@ public MvAppend( } private MvAppend(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index f824d0821cfbf..2b3afe093fa96 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -117,20 +116,18 @@ public MvSlice( private MvSlice(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - // TODO readOptionalNamedWriteable - in.readOptionalWriteable(i -> ((PlanStreamInput) i).readExpression()) + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeExpression(field); - ((PlanStreamOutput) out).writeExpression(start); - // TODO writeOptionalNamedWriteable - out.writeOptionalWriteable(end == null ? 
null : o -> ((PlanStreamOutput) o).writeExpression(end)); + out.writeNamedWriteable(field); + out.writeNamedWriteable(start); + out.writeOptionalNamedWriteable(end); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index fd5f493ae405e..aa41c58cef894 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -36,7 +36,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -93,18 +92,16 @@ public MvSort( private MvSort(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - // TODO readOptionalNamedWriteable - in.readOptionalWriteable(i -> ((PlanStreamInput) i).readExpression()) + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(field); - // TODO writeOptionalNamedWriteable - out.writeOptionalWriteable(order == null ? null : o -> ((PlanStreamOutput) o).writeExpression(order)); + out.writeNamedWriteable(field); + out.writeOptionalNamedWriteable(order); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java index 15bd09a4089e6..b53ead40d1e57 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; @@ -74,20 +73,18 @@ public MvZip( private MvZip(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - // TODO readOptionalNamedWriteable - in.readOptionalWriteable(i -> ((PlanStreamInput) i).readExpression()) + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - ((PlanStreamOutput) out).writeExpression(mvLeft); - ((PlanStreamOutput) out).writeExpression(mvRight); - // TODO 
writeOptionalNamedWriteable - out.writeOptionalWriteable(delim == null ? null : o -> ((PlanStreamOutput) o).writeExpression(delim)); + out.writeNamedWriteable(mvLeft); + out.writeNamedWriteable(mvRight); + out.writeOptionalNamedWriteable(delim); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 6a02eb4b94f12..e1553fa29fac9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.io.IOException; @@ -41,8 +40,6 @@ import java.util.stream.Stream; import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; /** * Function returning the first non-null value. @@ -113,16 +110,16 @@ public Coalesce( private Coalesce(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)) + in.readNamedWriteable(Expression.class), + in.readNamedWriteableCollectionAsList(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeCollection(children().subList(1, children().size()), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteable(children().get(0)); + out.writeNamedWriteableCollection(children().subList(1, children().size())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index cd88619c4fdbe..4f9219247d5c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -107,20 +107,14 @@ * This links it into the language and {@code META FUNCTIONS}. * *
  •
- * Register your function for serialization. We're in the process of migrating this serialization
- * from an older way to the more common, {@link org.elasticsearch.common.io.stream.NamedWriteable}.
- * All subclasses of {@link org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction},
- * {@link org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison},
- * and {@link org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation}
- * are migrated and should include a "getWriteableName", "writeTo", and a deserializing constructor.
- * They should also include a {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry}
- * and it should be linked in {@link org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction}.
- * Other functions serialized in {@link org.elasticsearch.xpack.esql.io.stream.PlanNamedTypes}
- * and you should copy what's done there.
+ * Implement serialization for your function by implementing
+ * {@link org.elasticsearch.common.io.stream.NamedWriteable#getWriteableName},
+ * {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo},
+ * and a deserializing constructor. Then add an {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry}
+ * constant and register it. To register it, look for a method like
+ * {@link org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction#getNamedWriteables()}
+ * in your function's class hierarchy. Keep going up until you hit a function with that name.
+ * Then add your new "ENTRY" constant to the list it returns.
 *
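To make the steps above concrete, here is a minimal sketch of the migrated pattern for a hypothetical unary function. MyFunction, its writeable name, and its field are illustrative only and not part of this patch; type resolution, evaluator, and NodeInfo plumbing are omitted. Optional and variadic children follow the same shape via readOptionalNamedWriteable and readNamedWriteableCollectionAsList, as the hunks elsewhere in this patch show.

    public class MyFunction extends UnaryScalarFunction {
        // Register under a stable name; add this constant to the list returned by
        // the nearest getNamedWriteables() up the class hierarchy.
        public static final NamedWriteableRegistry.Entry ENTRY =
            new NamedWriteableRegistry.Entry(Expression.class, "MyFunction", MyFunction::new);

        public MyFunction(Source source, Expression field) {
            super(source, field);
        }

        // Deserializing constructor: reads exactly what writeTo writes, in the same order.
        private MyFunction(StreamInput in) throws IOException {
            this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class));
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            source().writeTo(out);
            out.writeNamedWriteable(field());
        }

        @Override
        public String getWriteableName() {
            return ENTRY.name;
        }

        // (type resolution and evaluator overrides omitted for brevity)
    }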
  • * Rerun the {@code CsvTests}. They should find your function and maybe even pass. Add a diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java index 75d5641458e3f..1beef40ce0c42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java @@ -18,8 +18,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -66,8 +64,8 @@ protected BinarySpatialFunction( protected BinarySpatialFunction(StreamInput in, boolean leftDocValues, boolean rightDocValues, boolean pointsOnly) throws IOException { this( Source.EMPTY, - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), leftDocValues, rightDocValues, pointsOnly @@ -76,8 +74,8 @@ protected BinarySpatialFunction(StreamInput in, boolean leftDocValues, boolean r @Override public void writeTo(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeExpression(left()); - ((PlanStreamOutput) out).writeExpression(right()); + out.writeNamedWriteable(left()); + out.writeNamedWriteable(right()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 69464787f9288..23ee942bcf53a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.List; @@ -36,8 +35,6 @@ import static org.elasticsearch.common.unit.ByteSizeUnit.MB; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; /** * Join strings. 
@@ -63,16 +60,16 @@ public Concat( private Concat(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)) + in.readNamedWriteable(Expression.class), + in.readNamedWriteableCollectionAsList(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(children().get(0)); - out.writeCollection(children().subList(1, children().size()), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteable(children().get(0)); + out.writeNamedWriteableCollection(children().subList(1, children().size())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index f117ddf9816ad..1d2b743fe5a7a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -63,14 +62,14 @@ public EndsWith( } private EndsWith(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(str); - ((PlanStreamOutput) out).writeExpression(suffix); + out.writeNamedWriteable(str); + out.writeNamedWriteable(suffix); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 4f93ec8525dc6..b0e5b41f971e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -62,14 +61,14 @@ public Left( } private Left(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) 
out).writeExpression(str); - ((PlanStreamOutput) out).writeExpression(length); + out.writeNamedWriteable(str); + out.writeNamedWriteable(length); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java index 3ea741d3a42d4..5d7bb97469db6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -71,18 +70,18 @@ public Locate( private Locate(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readOptionalNamed(Expression.class) + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(str); - ((PlanStreamOutput) out).writeExpression(substr); - ((PlanStreamOutput) out).writeOptionalExpression(start); + out.writeNamedWriteable(str); + out.writeNamedWriteable(substr); + out.writeOptionalNamedWriteable(start); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index c35e568596c13..56090b0fb9e32 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.function.Function; @@ -37,13 +36,13 @@ public RLike(Source source, Expression field, RLikePattern rLikePattern, boolean } private RLike(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), new RLikePattern(in.readString())); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), new RLikePattern(in.readString())); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(field()); + out.writeNamedWriteable(field()); out.writeString(pattern().asJavaRegex()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java index 9c5fee999c332..2404beb6ffb5a 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -62,14 +61,14 @@ public Repeat( } private Repeat(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(str); - ((PlanStreamOutput) out).writeExpression(number); + out.writeNamedWriteable(str); + out.writeNamedWriteable(number); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index 33d2792df0d9b..30c8793fe371a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -22,8 +22,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -70,17 +68,17 @@ public Replace( private Replace(StreamInput in) throws IOException { this( Source.EMPTY, - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression() + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeExpression(str); - ((PlanStreamOutput) out).writeExpression(regex); - ((PlanStreamOutput) out).writeExpression(newStr); + out.writeNamedWriteable(str); + out.writeNamedWriteable(regex); + out.writeNamedWriteable(newStr); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index 71c99b38b46aa..ab6d3bf6cef99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import 
org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -62,14 +61,14 @@ public Right( } private Right(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(str); - ((PlanStreamOutput) out).writeExpression(length); + out.writeNamedWriteable(str); + out.writeNamedWriteable(length); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index ec2b291fea492..79ff23ac6737a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.function.Function; @@ -63,14 +62,14 @@ public Split( } private Split(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(str()); - ((PlanStreamOutput) out).writeExpression(delim()); + out.writeNamedWriteable(str()); + out.writeNamedWriteable(delim()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index 2fca50b53cf73..fc40a73471194 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -67,14 +66,14 @@ public StartsWith( } private StartsWith(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).readExpression()); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(str); 
- ((PlanStreamOutput) out).writeExpression(prefix); + out.writeNamedWriteable(str); + out.writeNamedWriteable(prefix); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index cb8aa1c8e2a44..c243e8383b47f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.Arrays; @@ -83,18 +82,18 @@ public Substring( private Substring(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readOptionalNamed(Expression.class) + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(str); - ((PlanStreamOutput) out).writeExpression(start); - ((PlanStreamOutput) out).writeOptionalExpression(length); + out.writeNamedWriteable(str); + out.writeNamedWriteable(start); + out.writeOptionalNamedWriteable(length); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java index aadb0b3ac7886..9cea1cebe926c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLower.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -60,12 +59,12 @@ public ToLower( } private ToLower(StreamInput in) throws IOException { - this(Source.EMPTY, ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).configuration()); + this(Source.EMPTY, in.readNamedWriteable(Expression.class), ((PlanStreamInput) in).configuration()); } @Override public void writeTo(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeExpression(field()); + out.writeNamedWriteable(field()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java index 398fe1c76a49f..4703bd5b2ba91 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpper.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamOutput; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -60,12 +59,12 @@ public ToUpper( } private ToUpper(StreamInput in) throws IOException { - this(Source.EMPTY, ((PlanStreamInput) in).readExpression(), ((PlanStreamInput) in).configuration()); + this(Source.EMPTY, in.readNamedWriteable(Expression.class), ((PlanStreamInput) in).configuration()); } @Override public void writeTo(StreamOutput out) throws IOException { - ((PlanStreamOutput) out).writeExpression(field()); + out.writeNamedWriteable(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index 931c10455db55..325cc0aea4461 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.io.IOException; import java.util.function.Function; @@ -55,13 +54,13 @@ public WildcardLike( } private WildcardLike(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression(), new WildcardPattern(in.readString())); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), new WildcardPattern(in.readString())); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(field()); + out.writeNamedWriteable(field()); out.writeString(pattern().pattern()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index 7ab6d96181f53..647071c44cfd3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -116,8 +116,8 @@ public interface BinaryEvaluator { ) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - ((PlanStreamInput) in).readExpression(), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), op, ints, longs, diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index a4eea2d676aad..52d4c111b2eae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; import java.io.IOException; @@ -148,8 +147,8 @@ public static EsqlBinaryComparison readFrom(StreamInput in) throws IOException { // TODO this uses a constructor on the operation *and* a name which is confusing. It only needs one. Everything else uses a name. var source = Source.readFrom((PlanStreamInput) in); EsqlBinaryComparison.BinaryComparisonOperation operation = EsqlBinaryComparison.BinaryComparisonOperation.readFromStream(in); - var left = ((PlanStreamInput) in).readExpression(); - var right = ((PlanStreamInput) in).readExpression(); + var left = in.readNamedWriteable(Expression.class); + var right = in.readNamedWriteable(Expression.class); // TODO: Remove zoneId entirely var zoneId = in.readOptionalZoneId(); return operation.buildNewInstance(source, left, right); @@ -159,8 +158,8 @@ public static EsqlBinaryComparison readFrom(StreamInput in) throws IOException { public final void writeTo(StreamOutput out) throws IOException { source().writeTo(out); functionType.writeTo(out); - ((PlanStreamOutput) out).writeExpression(left()); - ((PlanStreamOutput) out).writeExpression(right()); + out.writeNamedWriteable(left()); + out.writeNamedWriteable(right()); out.writeOptionalZoneId(zoneId()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index 924c483717d16..b7ebf114501cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import java.io.IOException; @@ -29,8 +28,6 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.util.StringUtils.ordinal; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; public class In extends 
org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "In", In::new); @@ -48,16 +45,16 @@ public In(Source source, Expression value, List list) { private In(StreamInput in) throws IOException { this( Source.readFrom((PlanStreamInput) in), - ((PlanStreamInput) in).readExpression(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)) + in.readNamedWriteable(Expression.class), + in.readNamedWriteableCollectionAsList(Expression.class) ); } @Override public void writeTo(StreamOutput out) throws IOException { source().writeTo(out); - ((PlanStreamOutput) out).writeExpression(value()); - out.writeCollection(list(), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteable(value()); + out.writeNamedWriteableCollection(list()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index fc23e0494732b..8034eba20690d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -10,7 +10,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; @@ -24,13 +23,8 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.Order; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; @@ -38,14 +32,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.EsField; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; -import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Dissect.Parser; @@ -85,13 +71,10 @@ import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.BiFunction; -import static java.util.Map.entry; import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.Entry.of; import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; @@ -173,27 +156,7 @@ public static List namedTypeEntries() { of(LogicalPlan.class, Project.class, PlanNamedTypes::writeProject, PlanNamedTypes::readProject), of(LogicalPlan.class, TopN.class, PlanNamedTypes::writeTopN, PlanNamedTypes::readTopN) ); - List entries = new ArrayList<>(declared); - - // From NamedWriteables - for (List ee : List.of( - AbstractMultivalueFunction.getNamedWriteables(), - AggregateFunction.getNamedWriteables(), - BinarySpatialFunction.getNamedWriteables(), - EsqlArithmeticOperation.getNamedWriteables(), - EsqlBinaryComparison.getNamedWriteables(), - EsqlScalarFunction.getNamedWriteables(), - FullTextPredicate.getNamedWriteables(), - NamedExpression.getNamedWriteables(), - UnaryScalarFunction.getNamedWriteables(), - List.of(UnsupportedAttribute.ENTRY, Literal.ENTRY, org.elasticsearch.xpack.esql.expression.Order.ENTRY) - )) { - for (NamedWriteableRegistry.Entry e : ee) { - entries.add(of(Expression.class, e)); - } - } - - return entries; + return declared; } // -- physical plan nodes @@ -201,7 +164,7 @@ static AggregateExec readAggregateExec(PlanStreamInput in) throws IOException { return new AggregateExec( Source.readFrom(in), in.readPhysicalPlanNode(), - in.readCollectionAsList(readerFromPlanReader(PlanStreamInput::readExpression)), + in.readNamedWriteableCollectionAsList(Expression.class), in.readNamedWriteableCollectionAsList(NamedExpression.class), in.readEnum(AggregateExec.Mode.class), in.readOptionalVInt() @@ -211,7 +174,7 @@ static AggregateExec readAggregateExec(PlanStreamInput in) throws IOException { static void writeAggregateExec(PlanStreamOutput out, AggregateExec aggregateExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(aggregateExec.child()); - out.writeCollection(aggregateExec.groupings(), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + out.writeNamedWriteableCollection(aggregateExec.groupings()); out.writeNamedWriteableCollection(aggregateExec.aggregates()); out.writeEnum(aggregateExec.getMode()); out.writeOptionalVInt(aggregateExec.estimatedRowSize()); @@ -221,7 +184,7 @@ static DissectExec readDissectExec(PlanStreamInput in) throws IOException { return new DissectExec( Source.readFrom(in), in.readPhysicalPlanNode(), - in.readExpression(), + in.readNamedWriteable(Expression.class), readDissectParser(in), in.readNamedWriteableCollectionAsList(Attribute.class) ); @@ -230,7 +193,7 @@ static DissectExec readDissectExec(PlanStreamInput in) throws IOException { static void writeDissectExec(PlanStreamOutput out, DissectExec dissectExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(dissectExec.child()); - out.writeExpression(dissectExec.inputExpression()); + 
out.writeNamedWriteable(dissectExec.inputExpression()); writeDissectParser(out, dissectExec.parser()); out.writeNamedWriteableCollection(dissectExec.extractedFields()); } @@ -255,7 +218,7 @@ static void writeEsQueryExec(PlanStreamOutput out, EsQueryExec esQueryExec) thro writeIndexMode(out, esQueryExec.indexMode()); out.writeNamedWriteableCollection(esQueryExec.output()); out.writeOptionalNamedWriteable(esQueryExec.query()); - out.writeOptionalExpression(esQueryExec.limit()); + out.writeOptionalNamedWriteable(esQueryExec.limit()); out.writeOptionalCollection(esQueryExec.sorts(), writerFromPlanWriter(PlanNamedTypes::writeFieldSort)); out.writeOptionalInt(esQueryExec.estimatedRowSize()); } @@ -414,13 +377,13 @@ static void writeFieldExtractExec(PlanStreamOutput out, FieldExtractExec fieldEx } static FilterExec readFilterExec(PlanStreamInput in) throws IOException { - return new FilterExec(Source.readFrom(in), in.readPhysicalPlanNode(), in.readExpression()); + return new FilterExec(Source.readFrom(in), in.readPhysicalPlanNode(), in.readNamedWriteable(Expression.class)); } static void writeFilterExec(PlanStreamOutput out, FilterExec filterExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(filterExec.child()); - out.writeExpression(filterExec.condition()); + out.writeNamedWriteable(filterExec.condition()); } static FragmentExec readFragmentExec(PlanStreamInput in) throws IOException { @@ -448,7 +411,7 @@ static GrokExec readGrokExec(PlanStreamInput in) throws IOException { return new GrokExec( source = Source.readFrom(in), in.readPhysicalPlanNode(), - in.readExpression(), + in.readNamedWriteable(Expression.class), Grok.pattern(source, in.readString()), in.readNamedWriteableCollectionAsList(Attribute.class) ); @@ -457,19 +420,19 @@ static GrokExec readGrokExec(PlanStreamInput in) throws IOException { static void writeGrokExec(PlanStreamOutput out, GrokExec grokExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(grokExec.child()); - out.writeExpression(grokExec.inputExpression()); + out.writeNamedWriteable(grokExec.inputExpression()); out.writeString(grokExec.pattern().pattern()); out.writeNamedWriteableCollection(grokExec.extractedFields()); } static LimitExec readLimitExec(PlanStreamInput in) throws IOException { - return new LimitExec(Source.readFrom(in), in.readPhysicalPlanNode(), in.readNamed(Expression.class)); + return new LimitExec(Source.readFrom(in), in.readPhysicalPlanNode(), in.readNamedWriteable(Expression.class)); } static void writeLimitExec(PlanStreamOutput out, LimitExec limitExec) throws IOException { Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(limitExec.child()); - out.writeExpression(limitExec.limit()); + out.writeNamedWriteable(limitExec.limit()); } static MvExpandExec readMvExpandExec(PlanStreamInput in) throws IOException { @@ -546,7 +509,7 @@ static TopNExec readTopNExec(PlanStreamInput in) throws IOException { Source.readFrom(in), in.readPhysicalPlanNode(), in.readCollectionAsList(org.elasticsearch.xpack.esql.expression.Order::new), - in.readNamed(Expression.class), + in.readNamedWriteable(Expression.class), in.readOptionalVInt() ); } @@ -555,7 +518,7 @@ static void writeTopNExec(PlanStreamOutput out, TopNExec topNExec) throws IOExce Source.EMPTY.writeTo(out); out.writePhysicalPlanNode(topNExec.child()); out.writeCollection(topNExec.order()); - out.writeExpression(topNExec.limit()); + out.writeNamedWriteable(topNExec.limit()); out.writeOptionalVInt(topNExec.estimatedRowSize()); } @@ -563,7 +526,7 @@ static 
Dissect readDissect(PlanStreamInput in) throws IOException { return new Dissect( Source.readFrom(in), in.readLogicalPlanNode(), - in.readExpression(), + in.readNamedWriteable(Expression.class), readDissectParser(in), in.readNamedWriteableCollectionAsList(Attribute.class) ); @@ -572,7 +535,7 @@ static Dissect readDissect(PlanStreamInput in) throws IOException { static void writeDissect(PlanStreamOutput out, Dissect dissect) throws IOException { Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(dissect.child()); - out.writeExpression(dissect.input()); + out.writeNamedWriteable(dissect.input()); writeDissectParser(out, dissect.parser()); out.writeNamedWriteableCollection(dissect.extractedFields()); } @@ -641,7 +604,7 @@ static Enrich readEnrich(PlanStreamInput in) throws IOException { } final Source source = Source.readFrom(in); final LogicalPlan child = in.readLogicalPlanNode(); - final Expression policyName = in.readExpression(); + final Expression policyName = in.readNamedWriteable(Expression.class); final NamedExpression matchField = in.readNamedWriteable(NamedExpression.class); if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readString(); // discard the old policy name @@ -676,7 +639,7 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(enrich.child()); - out.writeExpression(enrich.policyName()); + out.writeNamedWriteable(enrich.policyName()); out.writeNamedWriteable(enrich.matchField()); if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeString(BytesRefs.toString(enrich.policyName().fold())); // old policy name @@ -708,13 +671,13 @@ static void writeEsqlProject(PlanStreamOutput out, EsqlProject project) throws I } static Filter readFilter(PlanStreamInput in) throws IOException { - return new Filter(Source.readFrom(in), in.readLogicalPlanNode(), in.readExpression()); + return new Filter(Source.readFrom(in), in.readLogicalPlanNode(), in.readNamedWriteable(Expression.class)); } static void writeFilter(PlanStreamOutput out, Filter filter) throws IOException { Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(filter.child()); - out.writeExpression(filter.condition()); + out.writeNamedWriteable(filter.condition()); } static Grok readGrok(PlanStreamInput in) throws IOException { @@ -722,7 +685,7 @@ static Grok readGrok(PlanStreamInput in) throws IOException { return new Grok( source = Source.readFrom(in), in.readLogicalPlanNode(), - in.readExpression(), + in.readNamedWriteable(Expression.class), Grok.pattern(source, in.readString()), in.readNamedWriteableCollectionAsList(Attribute.class) ); @@ -731,18 +694,18 @@ static Grok readGrok(PlanStreamInput in) throws IOException { static void writeGrok(PlanStreamOutput out, Grok grok) throws IOException { Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(grok.child()); - out.writeExpression(grok.input()); + out.writeNamedWriteable(grok.input()); out.writeString(grok.parser().pattern()); out.writeNamedWriteableCollection(grok.extractedFields()); } static Limit readLimit(PlanStreamInput in) throws IOException { - return new Limit(Source.readFrom(in), in.readNamed(Expression.class), in.readLogicalPlanNode()); + return new Limit(Source.readFrom(in), in.readNamedWriteable(Expression.class), in.readLogicalPlanNode()); } static void writeLimit(PlanStreamOutput out, Limit limit) throws IOException { Source.EMPTY.writeTo(out); - out.writeExpression(limit.limit()); + out.writeNamedWriteable(limit.limit()); 
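Editorial note on the readLimit/writeLimit pair above: the stream format is not self-describing, so each reader must consume fields in exactly the order its writer emits them. The following restates the pair from this hunk with the field order annotated; it adds no new behavior, it only makes the contract visible.

    static Limit readLimit(PlanStreamInput in) throws IOException {
        return new Limit(
            Source.readFrom(in),                     // 1. source
            in.readNamedWriteable(Expression.class), // 2. limit expression
            in.readLogicalPlanNode()                 // 3. child plan
        );
    }

    static void writeLimit(PlanStreamOutput out, Limit limit) throws IOException {
        Source.EMPTY.writeTo(out);               // 1. source
        out.writeNamedWriteable(limit.limit());  // 2. limit expression
        out.writeLogicalPlanNode(limit.child()); // 3. child plan
    }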
out.writeLogicalPlanNode(limit.child()); } @@ -791,7 +754,7 @@ static TopN readTopN(PlanStreamInput in) throws IOException { Source.readFrom(in), in.readLogicalPlanNode(), in.readCollectionAsList(org.elasticsearch.xpack.esql.expression.Order::new), - in.readExpression() + in.readNamedWriteable(Expression.class) ); } @@ -799,37 +762,7 @@ static void writeTopN(PlanStreamOutput out, TopN topN) throws IOException { Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(topN.child()); out.writeCollection(topN.order()); - out.writeExpression(topN.limit()); - } - - static final Map< - String, - BiFunction< - Source, - Expression, - org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction>> QL_UNARY_SCALAR_CTRS = Map.ofEntries( - entry(name(IsNotNull.class), IsNotNull::new), - entry(name(IsNull.class), IsNull::new), - entry(name(Not.class), Not::new) - ); - - static org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction readQLUnaryScalar( - PlanStreamInput in, - String name - ) throws IOException { - var ctr = QL_UNARY_SCALAR_CTRS.get(name); - if (ctr == null) { - throw new IOException("Constructor for QLUnaryScalar not found for name:" + name); - } - return ctr.apply(Source.readFrom(in), in.readExpression()); - } - - static void writeQLUnaryScalar( - PlanStreamOutput out, - org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction function - ) throws IOException { - function.source().writeTo(out); - out.writeExpression(function.field()); + out.writeNamedWriteable(topN.limit()); } // -- ancillary supporting classes of plan nodes, etc diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 0b671d6b90c7e..be2a9454b3bef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -23,7 +23,6 @@ import org.elasticsearch.compute.data.LongBigArrayBlock; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.Column; -import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; @@ -92,11 +91,6 @@ public PhysicalPlan readOptionalPhysicalPlanNode() throws IOException { return readOptionalNamed(PhysicalPlan.class); } - @Override - public Expression readExpression() throws IOException { - return readNamed(Expression.class); - } - public T readNamed(Class type) throws IOException { String name = readString(); @SuppressWarnings("unchecked") @@ -120,18 +114,6 @@ public T readOptionalNamed(Class type) throws IOException { } } - public T readOptionalWithReader(PlanReader reader) throws IOException { - if (readBoolean()) { - T t = reader.read(this); - if (t == null) { - throwOnNullOptionalRead(reader); - } - return t; - } else { - return null; - } - } - public EsqlConfiguration configuration() throws IOException { return configuration; } @@ -220,12 +202,6 @@ static void throwOnNullOptionalRead(Class type) throws IOException { throw e; } - static void throwOnNullOptionalRead(PlanReader reader) throws IOException { - final IOException e = new IOException("read optional named returned null which is not allowed, reader:" + 
reader); - assert false : e; - throw e; - } - @Override public NameId mapNameId(long l) { return nameIdFunction.apply(l); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 45662d13e2618..58cd2465e1584 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -19,7 +19,6 @@ import org.elasticsearch.compute.data.LongBigArrayBlock; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.Column; -import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter; import org.elasticsearch.xpack.esql.plan.logical.join.Join; @@ -35,7 +34,7 @@ * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. */ -public final class PlanStreamOutput extends StreamOutput implements org.elasticsearch.xpack.esql.core.util.PlanStreamOutput { +public final class PlanStreamOutput extends StreamOutput { /** * Cache of written blocks. We use an {@link IdentityHashMap} for this @@ -94,20 +93,6 @@ public void writeOptionalPhysicalPlanNode(PhysicalPlan physicalPlan) throws IOEx } } - @Override - public void writeExpression(Expression expression) throws IOException { - writeNamed(Expression.class, expression); - } - - public void writeOptionalExpression(Expression expression) throws IOException { - if (expression == null) { - writeBoolean(false); - } else { - writeBoolean(true); - writeExpression(expression); - } - } - public void writeNamed(Class type, T value) throws IOException { String name = nameSupplier.apply(value.getClass()); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index 5a44c36a81b2d..bc7282857dbbe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -25,9 +25,6 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; -import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; - public class Aggregate extends UnaryPlan { public enum AggregateType { @@ -74,7 +71,7 @@ public Aggregate(PlanStreamInput in) throws IOException { Source.readFrom(in), in.readLogicalPlanNode(), AggregateType.readType(in), - in.readCollectionAsList(readerFromPlanReader(org.elasticsearch.xpack.esql.io.stream.PlanStreamInput::readExpression)), + in.readNamedWriteableCollectionAsList(Expression.class), in.readNamedWriteableCollectionAsList(NamedExpression.class) ); } @@ -83,7 +80,7 @@ public static void writeAggregate(PlanStreamOutput out, Aggregate aggregate) thr Source.EMPTY.writeTo(out); out.writeLogicalPlanNode(aggregate.child()); AggregateType.writeType(out, aggregate.aggregateType()); - out.writeCollection(aggregate.groupings(), writerFromPlanWriter(PlanStreamOutput::writeExpression)); + 
out.writeNamedWriteableCollection(aggregate.groupings); out.writeNamedWriteableCollection(aggregate.aggregates()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 9f44fe49fb80a..f28a1d11a5990 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -57,7 +57,7 @@ public Lookup( public Lookup(PlanStreamInput in) throws IOException { super(Source.readFrom(in), in.readLogicalPlanNode()); - this.tableName = in.readExpression(); + this.tableName = in.readNamedWriteable(Expression.class); this.matchFields = in.readNamedWriteableCollectionAsList(Attribute.class); this.localRelation = in.readBoolean() ? new LocalRelation(in) : null; } @@ -65,7 +65,7 @@ public Lookup(PlanStreamInput in) throws IOException { public void writeTo(PlanStreamOutput out) throws IOException { source().writeTo(out); out.writeLogicalPlanNode(child()); - out.writeExpression(tableName); + out.writeNamedWriteable(tableName); out.writeNamedWriteableCollection(matchFields); if (localRelation == null) { out.writeBoolean(false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index fc00f5be22624..46fe229098a16 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -56,11 +56,14 @@ import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.session.IndexResolver; import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; @@ -189,7 +192,11 @@ public List getNamedWriteables() { entries.add(UnsupportedAttribute.ENTRY); // TODO combine with above once these are in the same project entries.addAll(NamedExpression.getNamedWriteables()); entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); // TODO combine with above once these are in the same project + entries.addAll(Expression.getNamedWriteables()); + entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); // TODO combine with above once these are in the same project entries.add(MultiTypeEsField.ENTRY); // TODO combine with EsField.getNamedWriteables() once these are in the same module + entries.addAll(EsqlScalarFunction.getNamedWriteables()); + entries.addAll(AggregateFunction.getNamedWriteables()); return entries; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/MultiTypeEsField.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/MultiTypeEsField.java index 2b963e7428e2b..8b2fc926379f2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/MultiTypeEsField.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/MultiTypeEsField.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; import java.util.HashMap; @@ -46,8 +45,7 @@ public MultiTypeEsField(String name, DataType dataType, boolean aggregatable, Ma } public MultiTypeEsField(StreamInput in) throws IOException { - // TODO: Change the conversion expression serialization to i.readNamedWriteable(Expression.class) once Expression is fully supported - this(in.readString(), DataType.readFrom(in), in.readBoolean(), in.readImmutableMap(i -> ((PlanStreamInput) i).readExpression())); + this(in.readString(), DataType.readFrom(in), in.readBoolean(), in.readImmutableMap(i -> i.readNamedWriteable(Expression.class))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index a614ff3c621f8..fd811a2f2e217 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -29,6 +29,8 @@ import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; @@ -71,8 +73,8 @@ public static void assertSerialization(Expression expression) { public static void assertSerialization(Expression expression, EsqlConfiguration configuration) { Expression deserExpression = serializeDeserialize( expression, - PlanStreamOutput::writeExpression, - PlanStreamInput::readExpression, + PlanStreamOutput::writeNamedWriteable, + in -> in.readNamedWriteable(Expression.class), configuration ); EqualsHashCodeTestUtils.checkEqualsAndHashCode(expression, unused -> deserExpression); @@ -90,7 +92,7 @@ public static T serializeDeserialize(T orig, Serializer serializer, Deser ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), writableRegistry() ); - PlanStreamInput planStreamInput = new PlanStreamInput(in, planNameRegistry, writableRegistry(), config); + PlanStreamInput planStreamInput = new PlanStreamInput(in, planNameRegistry, in.namedWriteableRegistry(), config); return deserializer.read(planStreamInput); } catch (IOException e) { throw new UncheckedIOException(e); @@ -121,6 +123,9 @@ public static NamedWriteableRegistry writableRegistry() { entries.add(UnsupportedAttribute.ENTRY); entries.addAll(NamedExpression.getNamedWriteables()); entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); + entries.addAll(Expression.getNamedWriteables()); + 
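As a sketch of how these entry lists are consumed at runtime: the registry is built once from all the getNamedWriteables() lists, and readNamedWriteable dispatches on the name written by getWriteableName. This assumes the standard NamedWriteableRegistry behavior of rejecting duplicate names within a category; rawInput is a placeholder for any StreamInput positioned at a serialized expression.

    List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(Expression.getNamedWriteables());
    entries.addAll(EsqlScalarFunction.getNamedWriteables());
    entries.addAll(AggregateFunction.getNamedWriteables());
    // Construction fails fast on a duplicate (category, name) pair, which is why
    // each ENTRY may be registered exactly once.
    NamedWriteableRegistry registry = new NamedWriteableRegistry(entries);

    // Reading side: wrap the raw stream so readNamedWriteable can look up the reader.
    StreamInput in = new NamedWriteableAwareStreamInput(rawInput, registry);
    Expression expr = in.readNamedWriteable(Expression.class);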
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
index 33f9cb3123b8d..5e50af6a0d212 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
@@ -17,6 +17,8 @@
 import org.elasticsearch.xpack.esql.core.type.EsField;
 import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests;
 import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute;
+import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
+import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
@@ -76,8 +78,6 @@ protected boolean alwaysEmptySource() {
         return false;
     }
 
-    protected abstract List<NamedWriteableRegistry.Entry> getNamedWriteables();
-
     public EsqlConfiguration configuration() {
         return config;
     }
@@ -85,10 +85,15 @@ public EsqlConfiguration configuration() {
 
     @Override
     protected final NamedWriteableRegistry getNamedWriteableRegistry() {
        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(NamedExpression.getNamedWriteables());
+        entries.addAll(Expression.getNamedWriteables());
         entries.addAll(Attribute.getNamedWriteables());
+        entries.addAll(EsqlScalarFunction.getNamedWriteables());
+        entries.addAll(AggregateFunction.getNamedWriteables());
         entries.add(UnsupportedAttribute.ENTRY);
+        entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY);
+        entries.add(UnsupportedAttribute.EXPRESSION_ENTRY);
         entries.addAll(EsField.getNamedWriteables());
-        entries.addAll(getNamedWriteables());
+        entries.add(org.elasticsearch.xpack.esql.expression.Order.ENTRY);
         return new NamedWriteableRegistry(entries);
     }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java
index d8290966acbdd..8581699b83fbd 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java
@@ -7,13 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public abstract class AbstractUnaryScalarSerializationTests<T extends UnaryScalarFunction> extends AbstractExpressionSerializationTests<T> {
     protected abstract T create(Source source, Expression child);
@@ -28,9 +26,4 @@ protected final T mutateInstance(T instance) throws IOException {
         Expression child = randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild);
         return create(instance.source(), child);
     }
-
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return UnaryScalarFunction.getNamedWriteables();
-    }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java
index ce7aa789f89b1..a6808e835bc3c 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java
@@ -81,6 +81,7 @@ protected final NamedWriteableRegistry getNamedWriteableRegistry() {
         entries.addAll(Attribute.getNamedWriteables());
         entries.add(UnsupportedAttribute.ENTRY);
         entries.addAll(EsField.getNamedWriteables());
+        entries.addAll(Expression.getNamedWriteables());
         return new NamedWriteableRegistry(entries);
     }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/LiteralSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/LiteralSerializationTests.java
index 39e18bf9761ec..fa6041c6d2e58 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/LiteralSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/LiteralSerializationTests.java
@@ -7,12 +7,10 @@
 
 package org.elasticsearch.xpack.esql.expression;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.LiteralTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class LiteralSerializationTests extends AbstractExpressionSerializationTests<Literal> {
     @Override
@@ -25,11 +23,6 @@ protected Literal mutateInstance(Literal instance) throws IOException {
         return LiteralTests.mutateLiteral(instance);
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return List.of(Literal.ENTRY);
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/OrderSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/OrderSerializationTests.java
index dd2671f4cf86d..3c5a77daf8832 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/OrderSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/OrderSerializationTests.java
@@ -7,12 +7,10 @@
 
 package org.elasticsearch.xpack.esql.expression;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 
 import java.io.IOException;
-import java.util.List;
 
 public class OrderSerializationTests extends AbstractExpressionSerializationTests<Order> {
     @Override
@@ -42,11 +40,6 @@ protected Order mutateInstance(Order instance) throws IOException {
         return new Order(source, child, direction, nulls);
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return List.of(Order.ENTRY);
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
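With the registry assembled once in the base class, a concrete serialization test shrinks to a factory method and a mutator. A sketch of what a typical subclass now looks like — Foo is a hypothetical stand-in function, and randomSource(), randomChild() and randomValueOtherThan(...) are inherited from the test base classes:

    // Hypothetical example: Foo is an invented unary function used only for illustration.
    public class FooSerializationTests extends AbstractExpressionSerializationTests<Foo> {
        @Override
        protected Foo createTestInstance() {
            return new Foo(randomSource(), randomChild());
        }

        @Override
        protected Foo mutateInstance(Foo instance) throws IOException {
            // change exactly one property, so the mutated copy can never equal the original
            return new Foo(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
        }
        // No getNamedWriteables() override: the base registry already carries the
        // Expression, EsqlScalarFunction and AggregateFunction entries.
    }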
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java
index 954d26b6de137..d25305a9ea190 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DeepCopy.java
@@ -7,6 +7,7 @@
 
 package org.elasticsearch.xpack.esql.expression.function;
 
+import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockUtils;
 import org.elasticsearch.compute.data.Page;
@@ -18,6 +19,7 @@
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper;
 
+import java.io.IOException;
 import java.util.function.Function;
 
 /**
@@ -28,6 +30,16 @@ public DeepCopy(Source source, Expression child) {
         super(source, child);
     }
 
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public String getWriteableName() {
+        throw new UnsupportedOperationException();
+    }
+
     @Override
     public EvalOperator.ExpressionEvaluator.Factory toEvaluator(
         Function<Expression, EvalOperator.ExpressionEvaluator.Factory> toEvaluator
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java
index 74ace6f4ceb9c..6e2ec0d904b27 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java
@@ -7,6 +7,7 @@
 
 package org.elasticsearch.xpack.esql.expression.function;
 
+import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.compute.operator.EvalOperator;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.esql.core.ParsingException;
@@ -132,6 +133,16 @@ public DummyConfigurationOptionalArgumentFunction(Source source, List
             super(source, args);
         }
 
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public String getWriteableName() {
+            throw new UnsupportedOperationException();
+        }
+
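DeepCopy and the registry-test dummies above exist only inside tests, so once Expression extends NamedWriteable they must implement writeTo and getWriteableName somehow; throwing UnsupportedOperationException fails loudly if one of them ever leaks onto the wire, rather than silently writing bytes no registry entry can read back. A hypothetical guard test, not part of this patch, that would pin that behavior down:

    // Hypothetical: assumes access to DeepCopy, Source.EMPTY and the base-class
    // randomChild() helper; expectThrows comes from ESTestCase.
    public void testDeepCopyRefusesSerialization() {
        DeepCopy expr = new DeepCopy(Source.EMPTY, AbstractExpressionSerializationTests.randomChild());
        expectThrows(UnsupportedOperationException.class, () -> expr.writeTo(null));
        expectThrows(UnsupportedOperationException.class, expr::getWriteableName);
    }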
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgSerializationTests.java
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AvgSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class AvgSerializationTests extends AbstractExpressionSerializationTests<Avg> {
     @Override
@@ -24,11 +22,6 @@ protected Avg mutateInstance(Avg instance) throws IOException {
         return new Avg(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctSerializationTests.java
index c7166adb0b62f..ab06b0b58f7f0 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctSerializationTests.java
@@ -7,13 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class CountDistinctSerializationTests extends AbstractExpressionSerializationTests<CountDistinct> {
     @Override
@@ -27,8 +25,8 @@ protected CountDistinct createTestInstance() {
     @Override
     protected CountDistinct mutateInstance(CountDistinct instance) throws IOException {
         Source source = randomSource();
-        Expression field = randomChild();
-        Expression precision = randomBoolean() ? null : randomChild();
+        Expression field = instance.field();
+        Expression precision = instance.precision();
         if (randomBoolean()) {
             field = randomValueOtherThan(field, AbstractExpressionSerializationTests::randomChild);
         } else {
@@ -37,11 +35,6 @@ protected CountDistinct mutateInstance(CountDistinct instance) throws IOException {
         return new CountDistinct(source, field, precision);
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountSerializationTests.java
index 1c588b26abad8..133979f66860c 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class CountSerializationTests extends AbstractExpressionSerializationTests<Count> {
     @Override
@@ -24,11 +22,6 @@ protected Count mutateInstance(Count instance) throws IOException {
         return new Count(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxSerializationTests.java
index a50cba3e9e9cd..7a732883a99a5 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class MaxSerializationTests extends AbstractExpressionSerializationTests<Max> {
     @Override
@@ -24,11 +22,6 @@ protected Max mutateInstance(Max instance) throws IOException {
         return new Max(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
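The CountDistinct change above fixes a subtle test bug: mutateInstance used to draw fresh random values for both arguments and then vary one, so the result was only guaranteed to differ from the fresh draw, not from the instance being mutated — on small value domains the "mutation" could reproduce the original and break the not-equals assertion. Copying the instance's own values and changing exactly one cannot. A self-contained illustration of the difference, using an invented two-field record:

    import java.util.Random;

    record Point(int x, int y) {}

    class MutateSketch {
        static final Random RND = new Random();

        // Buggy shape: both fields re-drawn fresh, then one varied relative to the
        // fresh draw. For p = (0, 0): fresh x = 1, y = 0, mutate x -> 0 gives (0, 0) == p.
        static Point mutateBuggy(Point p) {
            int x = RND.nextInt(2);
            int y = RND.nextInt(2);
            if (RND.nextBoolean()) x = otherThan(x); else y = otherThan(y);
            return new Point(x, y); // may equal p
        }

        // Fixed shape: copy the instance's fields, change exactly one.
        static Point mutateFixed(Point p) {
            int x = p.x();
            int y = p.y();
            if (RND.nextBoolean()) x = otherThan(x); else y = otherThan(y);
            return new Point(x, y); // always differs from p in one field
        }

        static int otherThan(int v) {
            int r;
            do { r = RND.nextInt(2); } while (r == v);
            return r;
        }
    }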
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationSerializationTests.java
index a57c45da07ba3..bdbe839c46a75 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviationSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class MedianAbsoluteDeviationSerializationTests extends AbstractExpressionSerializationTests<MedianAbsoluteDeviation> {
     @Override
@@ -27,11 +25,6 @@ protected MedianAbsoluteDeviation mutateInstance(MedianAbsoluteDeviation instance) throws IOException {
         );
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianSerializationTests.java
index 56943e9ef41c3..75161977319ea 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class MedianSerializationTests extends AbstractExpressionSerializationTests<Median> {
     @Override
@@ -24,11 +22,6 @@ protected Median mutateInstance(Median instance) throws IOException {
         return new Median(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinSerializationTests.java
index bd0d8088ef857..1ff434d8d2a76 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class MinSerializationTests extends AbstractExpressionSerializationTests<Min> {
     @Override
@@ -24,11 +22,6 @@ protected Min mutateInstance(Min instance) throws IOException {
         return new Min(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileSerializationTests.java
index 88e063058c9f2..a6349b9cb5c81 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/PercentileSerializationTests.java
@@ -7,13 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class PercentileSerializationTests extends AbstractExpressionSerializationTests<Percentile> {
     @Override
@@ -37,11 +35,6 @@ protected Percentile mutateInstance(Percentile instance) throws IOException {
         return new Percentile(source, field, percentile);
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java
new file mode 100644
index 0000000000000..94b2a81b308d7
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/RateSerializationTests.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.aggregate;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
+
+import java.io.IOException;
+
+public class RateSerializationTests extends AbstractExpressionSerializationTests<Rate> {
+    @Override
+    protected Rate createTestInstance() {
+        Source source = randomSource();
+        Expression field = randomChild();
+        Expression timestamp = randomChild();
+        Expression unit = randomBoolean() ? null : randomChild();
+        return new Rate(source, field, timestamp, unit);
+    }
+
+    @Override
+    protected Rate mutateInstance(Rate instance) throws IOException {
+        Source source = randomSource();
+        Expression field = instance.field();
+        Expression timestamp = instance.timestamp();
+        Expression unit = instance.unit();
+        switch (between(0, 2)) {
+            case 0 -> field = randomValueOtherThan(field, AbstractExpressionSerializationTests::randomChild);
+            case 1 -> timestamp = randomValueOtherThan(timestamp, AbstractExpressionSerializationTests::randomChild);
+            case 2 -> unit = randomValueOtherThan(unit, () -> randomBoolean() ? null : randomChild());
+        }
+        return new Rate(source, field, timestamp, unit);
+    }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidSerializationTests.java
index 9adf7d1e00361..037b7dc229b03 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class SpatialCentroidSerializationTests extends AbstractExpressionSerializationTests<SpatialCentroid> {
     @Override
@@ -27,11 +25,6 @@ protected SpatialCentroid mutateInstance(SpatialCentroid instance) throws IOException {
         );
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumSerializationTests.java
index 9c7ee0e8348b7..863392f7eb451 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SumSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class SumSerializationTests extends AbstractExpressionSerializationTests<Sum> {
     @Override
@@ -24,11 +22,6 @@ protected Sum mutateInstance(Sum instance) throws IOException {
         return new Sum(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java
index 2906a1e74e72a..82bf57d1a194e 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopSerializationTests.java
@@ -7,13 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class TopSerializationTests extends AbstractExpressionSerializationTests<Top> {
     @Override
@@ -38,9 +36,4 @@ protected Top mutateInstance(Top instance) throws IOException {
         }
         return new Top(source, field, limit, order);
     }
-
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesSerializationTests.java
index 2471e6a8218b3..6787e8d1ad09a 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesSerializationTests.java
@@ -7,11 +7,9 @@
 
 package org.elasticsearch.xpack.esql.expression.function.aggregate;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class ValuesSerializationTests extends AbstractExpressionSerializationTests<Values> {
     @Override
@@ -24,11 +22,6 @@ protected Values mutateInstance(Values instance) throws IOException {
         return new Values(instance.source(), randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild));
     }
 
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AggregateFunction.getNamedWriteables();
-    }
-
     @Override
     protected boolean alwaysEmptySource() {
         return true;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketSerializationTests.java
index 8250cad0c85e8..3b38c31e760be 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/grouping/BucketSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.grouping;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class BucketSerializationTests extends AbstractExpressionSerializationTests<Bucket> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Bucket createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java
index ffeae4465eac6..40788eb7a2b2d 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java
@@ -7,21 +7,14 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class AndSerializationTests extends AbstractExpressionSerializationTests<And> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected And createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java
index 61e3690f1633f..aa8bad907eeb3 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java
@@ -7,21 +7,14 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class NotSerializationTests extends AbstractExpressionSerializationTests<Not> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return UnaryScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Not createTestInstance() {
         return new Not(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java
index 1755ba1fac026..5e68a4af80623 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java
@@ -7,21 +7,14 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 
 import java.io.IOException;
-import java.util.List;
 
 public class OrSerializationTests extends AbstractExpressionSerializationTests<Or> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Or createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseSerializationTests.java
index 69bbf2f76937f..06df15dd68827 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseSerializationTests.java
@@ -7,20 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.conditional;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractVarargsSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.util.List;
 
 public class CaseSerializationTests extends AbstractVarargsSerializationTests<Case> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Case create(Source source, Expression first, List<Expression> rest) {
         return new Case(source, first, rest);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestSerializationTests.java
index 43e1fe405911a..42b0203bc79ca 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestSerializationTests.java
@@ -7,20 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.conditional;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractVarargsSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.util.List;
 
 public class GreatestSerializationTests extends AbstractVarargsSerializationTests<Greatest> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Greatest create(Source source, Expression first, List<Expression> rest) {
         return new Greatest(source, first, rest);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastSerializationTests.java
index f552713af4dbe..fc9e8e5522a19 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastSerializationTests.java
@@ -7,20 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.conditional;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractVarargsSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.util.List;
 
 public class LeastSerializationTests extends AbstractVarargsSerializationTests<Least> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Least create(Source source, Expression first, List<Expression> rest) {
         return new Least(source, first, rest);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffSerializationTests.java
index b1dc1b064ae5a..77158b6f1866f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.date;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class DateDiffSerializationTests extends AbstractExpressionSerializationTests<DateDiff> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected DateDiff createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractSerializationTests.java
index 6e1c061c84f2e..f4e6d2672a40f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.date;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class DateExtractSerializationTests extends AbstractExpressionSerializationTests<DateExtract> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected DateExtract createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatSerializationTests.java
index 4dff735318558..ece145e95aabb 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.date;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class DateFormatSerializationTests extends AbstractExpressionSerializationTests<DateFormat> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected DateFormat createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseSerializationTests.java
index e816f2c4a20fb..79a650c8dd963 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.date;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class DateParseSerializationTests extends AbstractExpressionSerializationTests<DateParse> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected DateParse createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncSerializationTests.java
index 09d2e06003128..3d1616ce29adf 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.date;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class DateTruncSerializationTests extends AbstractExpressionSerializationTests<DateTrunc> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected DateTrunc createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowSerializationTests.java
index 3bb8c2f260561..b816e3a8da858 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowSerializationTests.java
@@ -7,19 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.date;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class NowSerializationTests extends AbstractExpressionSerializationTests<Now> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Now createTestInstance() {
         return new Now(randomSource(), configuration());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchSerializationTests.java
index e20f9e03f09b6..3c833c4b0d7ac 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchSerializationTests.java
@@ -7,21 +7,14 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.ip;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
 import java.util.List;
 
 public class CIDRMatchSerializationTests extends AbstractExpressionSerializationTests<CIDRMatch> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected CIDRMatch createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixSerializationTests.java
index 8393dad31b2e2..d7fc05d9d0f64 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefixSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.ip;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class IpPrefixSerializationTests extends AbstractExpressionSerializationTests<IpPrefix> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected IpPrefix createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2SerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2SerializationTests.java
index 11986adf2bc24..2ae88bbf24549 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2SerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2SerializationTests.java
@@ -7,22 +7,14 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.math;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
 import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class Atan2SerializationTests extends AbstractExpressionSerializationTests<Atan2> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Atan2 createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ESerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ESerializationTests.java
index ff8f1563df94e..971295aa02a9b 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ESerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/ESerializationTests.java
@@ -7,19 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.math;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class ESerializationTests extends AbstractExpressionSerializationTests<E> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected E createTestInstance() {
         return new E(randomSource());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogSerializationTests.java
index bb33516900dd7..8b65a40d9e831 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.math;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class LogSerializationTests extends AbstractExpressionSerializationTests<Log> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Log createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiSerializationTests.java
index 4768ab292be10..597d1cbc8533c 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PiSerializationTests.java
@@ -7,19 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.math;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class PiSerializationTests extends AbstractExpressionSerializationTests<Pi> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Pi createTestInstance() {
         return new Pi(randomSource());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowSerializationTests.java
index b47ec608cccab..b811d719ca923 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.math;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class PowSerializationTests extends AbstractExpressionSerializationTests<Pow> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Pow createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundSerializationTests.java
index 8146aea8d5c9f..91e97a6d07b14 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundSerializationTests.java
@@ -7,21 +7,13 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.math;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class RoundSerializationTests extends AbstractExpressionSerializationTests<Round> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Round createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauSerializationTests.java
index 3320dcf0a180c..fb259f0e43150 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TauSerializationTests.java
@@ -7,19 +7,11 @@
 
 package org.elasticsearch.xpack.esql.expression.function.scalar.math;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 
 import java.io.IOException;
-import java.util.List;
 
 public class TauSerializationTests extends AbstractExpressionSerializationTests<Tau> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return EsqlScalarFunction.getNamedWriteables();
-    }
-
     @Override
     protected Tau createTestInstance() {
         return new Tau(randomSource());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMvSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMvSerializationTests.java
deleted file mode 100644
index fba33c9ea1c03..0000000000000
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMvSerializationTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue;
-
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests;
-
-import java.util.List;
-
-public abstract class AbstractMvSerializationTests<T extends Expression> extends AbstractExpressionSerializationTests<T> {
-    @Override
-    protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return AbstractMultivalueFunction.getNamedWriteables();
-    }
-}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendSerializationTests.java
index 8afd1b44dc3f3..9bbb4856b5e0f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendSerializationTests.java
@@ -13,7 +13,7 @@
 
 import java.io.IOException;
 
-public class MvAppendSerializationTests extends AbstractMvSerializationTests<MvAppend> {
+public class MvAppendSerializationTests extends AbstractExpressionSerializationTests<MvAppend> {
     @Override
     protected MvAppend createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgSerializationTests.java
index f70702b001492..271312622245d 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvAvgSerializationTests extends AbstractMvSerializationTests<MvAvg> {
+public class MvAvgSerializationTests extends AbstractExpressionSerializationTests<MvAvg> {
     @Override
     protected MvAvg createTestInstance() {
         return new MvAvg(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatSerializationTests.java
index 9f2aba8d9d9ca..ba4eda8590f70 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatSerializationTests.java
@@ -13,7 +13,7 @@
 
 import java.io.IOException;
 
-public class MvConcatSerializationTests extends AbstractMvSerializationTests<MvConcat> {
+public class MvConcatSerializationTests extends AbstractExpressionSerializationTests<MvConcat> {
     @Override
     protected MvConcat createTestInstance() {
         Source source = randomSource();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountSerializationTests.java
index a0d28a6cf925b..0ec51d73982ec 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvCountSerializationTests extends AbstractMvSerializationTests<MvCount> {
+public class MvCountSerializationTests extends AbstractExpressionSerializationTests<MvCount> {
     @Override
     protected MvCount createTestInstance() {
         return new MvCount(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeSerializationTests.java
index afb2ec90e1e3e..410167addf163 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvDedupeSerializationTests extends AbstractMvSerializationTests<MvDedupe> {
+public class MvDedupeSerializationTests extends AbstractExpressionSerializationTests<MvDedupe> {
     @Override
     protected MvDedupe createTestInstance() {
         return new MvDedupe(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstSerializationTests.java
index dbb49bb96a663..8934dde1717c6 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvFirstSerializationTests extends AbstractMvSerializationTests<MvFirst> {
+public class MvFirstSerializationTests extends AbstractExpressionSerializationTests<MvFirst> {
     @Override
     protected MvFirst createTestInstance() {
         return new MvFirst(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastSerializationTests.java
index 190eb0263c162..9c4ad7ab059ef 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvLastSerializationTests extends AbstractMvSerializationTests<MvLast> {
+public class MvLastSerializationTests extends AbstractExpressionSerializationTests<MvLast> {
     @Override
     protected MvLast createTestInstance() {
         return new MvLast(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxSerializationTests.java
index ffc51af5f103d..4ce5112c4e8e7 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvMaxSerializationTests extends AbstractMvSerializationTests<MvMax> {
+public class MvMaxSerializationTests extends AbstractExpressionSerializationTests<MvMax> {
     @Override
     protected MvMax createTestInstance() {
         return new MvMax(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianSerializationTests.java
index 067cc6430ce01..0e35ec6f77150 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvMedianSerializationTests extends AbstractMvSerializationTests<MvMedian> {
+public class MvMedianSerializationTests extends AbstractExpressionSerializationTests<MvMedian> {
     @Override
     protected MvMedian createTestInstance() {
         return new MvMedian(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinSerializationTests.java
index 1f38587274353..0769e41a09921 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinSerializationTests.java
@@ -11,7 +11,7 @@
 
 import java.io.IOException;
 
-public class MvMinSerializationTests extends AbstractMvSerializationTests<MvMin> {
+public class MvMinSerializationTests extends AbstractExpressionSerializationTests<MvMin> {
     @Override
     protected MvMin createTestInstance() {
         return new MvMin(randomSource(), randomChild());
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceSerializationTests.java
index 64209ce0f4644..ffa355178b460 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceSerializationTests.java
@@ -13,7 +13,7 @@
 
 import java.io.IOException;
 
-public class MvSliceSerializationTests extends AbstractMvSerializationTests<MvSlice> {
+public class MvSliceSerializationTests extends
AbstractExpressionSerializationTests { @Override protected MvSlice createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortSerializationTests.java index 1728ad6f09357..d7dba33e1aae3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortSerializationTests.java @@ -13,7 +13,7 @@ import java.io.IOException; -public class MvSortSerializationTests extends AbstractMvSerializationTests { +public class MvSortSerializationTests extends AbstractExpressionSerializationTests { @Override protected MvSort createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumSerializationTests.java index e8ddcc9340b45..15f6d94b44066 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumSerializationTests.java @@ -11,7 +11,7 @@ import java.io.IOException; -public class MvSumSerializationTests extends AbstractMvSerializationTests { +public class MvSumSerializationTests extends AbstractExpressionSerializationTests { @Override protected MvSum createTestInstance() { return new MvSum(randomSource(), randomChild()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipSerializationTests.java index d16ca02627b29..4b49a1f55340d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipSerializationTests.java @@ -13,7 +13,7 @@ import java.io.IOException; -public class MvZipSerializationTests extends AbstractMvSerializationTests { +public class MvZipSerializationTests extends AbstractExpressionSerializationTests { @Override protected MvZip createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceSerializationTests.java index 7cab0a957b235..ad792b6b66668 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/CoalesceSerializationTests.java @@ -7,20 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractVarargsSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.util.List; public class CoalesceSerializationTests extends AbstractVarargsSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Coalesce create(Source source, Expression first, List rest) { return new Coalesce(source, first, rest); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java index 23545b3627a1a..bd309e4a893dc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java @@ -7,22 +7,14 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.io.IOException; -import java.util.List; public class IsNotNullSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return UnaryScalarFunction.getNamedWriteables(); - } - @Override protected IsNotNull createTestInstance() { return new IsNotNull(randomSource(), randomChild()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java index 354a2129d7ec0..60bf3085c6d13 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java @@ -7,22 +7,14 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.io.IOException; -import java.util.List; public class IsNullSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return UnaryScalarFunction.getNamedWriteables(); - } - @Override protected IsNull createTestInstance() { return new IsNull(randomSource(), randomChild()); diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/AbstractBinarySpatialFunctionSerializationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/AbstractBinarySpatialFunctionSerializationTestCase.java index d304c474feac3..006fdf6865340 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/AbstractBinarySpatialFunctionSerializationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/AbstractBinarySpatialFunctionSerializationTestCase.java @@ -7,24 +7,17 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; import java.io.IOException; -import java.util.List; public abstract class AbstractBinarySpatialFunctionSerializationTestCase extends AbstractExpressionSerializationTests { protected abstract T build(Source source, Expression left, Expression right); - @Override - protected final List getNamedWriteables() { - return BinarySpatialFunction.getNamedWriteables(); - } - @Override protected final T createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXSerializationTests.java index 56ddd039cb87f..a497d8a50f06b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXSerializationTests.java @@ -7,19 +7,11 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.io.IOException; -import java.util.List; public class StXSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return UnaryScalarFunction.getNamedWriteables(); - } - @Override protected StX createTestInstance() { return new StX(randomSource(), randomChild()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYSerializationTests.java index f44b49d38d34c..5b3edb9cd0a24 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYSerializationTests.java @@ -7,19 +7,11 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.io.IOException; -import java.util.List; public class StYSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return UnaryScalarFunction.getNamedWriteables(); - } - @Override protected StY createTestInstance() { return new StY(randomSource(), randomChild()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatSerializationTests.java index 30f6acffbaf8a..c1fa6f9274a8c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatSerializationTests.java @@ -7,20 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractVarargsSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.util.List; public class ConcatSerializationTests extends AbstractVarargsSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Concat create(Source source, Expression first, List rest) { return new Concat(source, first, rest); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithSerializationTests.java index 2f734d585ab52..183e39f11b6c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class EndsWithSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected EndsWith createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftSerializationTests.java index 2162044d2e29f..b20d740954ff9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftSerializationTests.java 
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class LeftSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Left createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateSerializationTests.java index d705b4a6167ec..a75fb9d1f772a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LocateSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class LocateSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Locate createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeSerializationTests.java index 6be60b7163e3b..655d1a75470a3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeSerializationTests.java @@ -7,22 +7,14 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.io.IOException; -import java.util.List; public class RLikeSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return 
UnaryScalarFunction.getNamedWriteables(); - } - @Override protected RLike createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatSerializationTests.java index d246b28ddb0d6..6abcfdc472685 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class RepeatSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Repeat createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceSerializationTests.java index 555f210e6b0c0..4bc54241eca2c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class ReplaceSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Replace createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightSerializationTests.java index 17ab41cc467db..7ed7345910765 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; 
import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class RightSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Right createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSerializationTests.java index 4e38ea9a57d7f..bede192c354d1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class SplitSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Split createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithSerializationTests.java index 4cff6f3441510..1b1167879b212 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class StartsWithSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected StartsWith createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringSerializationTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringSerializationTests.java index d5f8fe498902d..accbed6e8f613 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class SubstringSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected Substring createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerSerializationTests.java index f2dbdbd74470a..caff331755f44 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToLowerSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class ToLowerSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected ToLower createTestInstance() { return new ToLower(randomSource(), randomChild(), configuration()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperSerializationTests.java index e57aedd79fdfd..97316c9cc7681 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ToUpperSerializationTests.java @@ -7,21 +7,13 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; -import java.util.List; public class ToUpperSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected ToUpper createTestInstance() { return new ToUpper(randomSource(), randomChild(), configuration()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeSerializationTests.java index 99b566b1e8584..1bbf124864682 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeSerializationTests.java @@ -7,22 +7,14 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import java.io.IOException; -import java.util.List; public class WildcardLikeSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return UnaryScalarFunction.getNamedWriteables(); - } - @Override protected WildcardLike createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticSerializationTests.java index c9a7933142605..81860addf1c5e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AbstractArithmeticSerializationTests.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; import java.io.IOException; -import java.util.List; public abstract class AbstractArithmeticSerializationTests extends AbstractExpressionSerializationTests< T> { @@ -35,9 +33,4 @@ protected final T mutateInstance(T instance) throws IOException { } return create(instance.source(), left, right); } - - @Override - protected List getNamedWriteables() { - return EsqlArithmeticOperation.getNamedWriteables(); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractComparisonSerializationTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractComparisonSerializationTests.java index 8f28cfddb1d3a..3802d301357e9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractComparisonSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractComparisonSerializationTests.java @@ -7,13 +7,11 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; import java.io.IOException; -import java.util.List; public abstract class AbstractComparisonSerializationTests extends AbstractExpressionSerializationTests { protected abstract T create(Source source, Expression left, Expression right); @@ -34,9 +32,4 @@ protected final T mutateInstance(T instance) throws IOException { } return create(instance.source(), left, right); } - - @Override - protected List getNamedWriteables() { - return EsqlBinaryComparison.getNamedWriteables(); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InSerializationTests.java index a92921050ab18..8e8c6287d433d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InSerializationTests.java @@ -7,21 +7,14 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; -import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import java.io.IOException; import java.util.List; public class InSerializationTests extends AbstractExpressionSerializationTests { - @Override - protected List getNamedWriteables() { - return EsqlScalarFunction.getNamedWriteables(); - } - @Override protected In createTestInstance() { Source source = randomSource(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsSerializationTests.java index d9daa27936267..7ca1e27ba510a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEqualsSerializationTests.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; import java.io.IOException; -import java.util.List; public class InsensitiveEqualsSerializationTests extends AbstractExpressionSerializationTests { @Override @@ -31,9 +29,4 @@ protected final InsensitiveEquals mutateInstance(InsensitiveEquals instance) thr } return new InsensitiveEquals(instance.source(), left, right); } - - @Override - protected List getNamedWriteables() { - return List.of(InsensitiveEquals.ENTRY); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/AbstractFulltextSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/AbstractFulltextSerializationTests.java index 88f88436f8a04..370cfaf67fe0f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/AbstractFulltextSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/AbstractFulltextSerializationTests.java @@ -7,12 +7,10 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.fulltext; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -20,11 +18,6 @@ public abstract class AbstractFulltextSerializationTests getNamedWriteables() { - return FullTextPredicate.getNamedWriteables(); - } - String randomOptionOrNull() { if (randomBoolean()) { return null; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 05f0f47910665..5a398ed3e4370 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.SerializationTestUtils; import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -34,7 +33,6 @@ import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.KeywordEsField; import org.elasticsearch.xpack.esql.expression.Order; -import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -91,13 +89,11 @@ import java.util.Map; import java.util.Set; import java.util.TreeSet; -import java.util.stream.Stream; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static 
org.elasticsearch.xpack.esql.SerializationTestUtils.serializeDeserialize; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasItem; public class PlanNamedTypesTests extends ESTestCase { @@ -172,19 +168,6 @@ public void testLogicalPlanEntries() { assertMap(actual, matchesList(expected)); } - public void testFunctionEntries() { - var serializableFunctions = PlanNamedTypes.namedTypeEntries() - .stream() - .filter(e -> Expression.class.isAssignableFrom(e.categoryClass())) - .map(PlanNameRegistry.Entry::name) - .sorted() - .toList(); - - for (var function : new EsqlFunctionRegistry().listFunctions()) { - assertThat(serializableFunctions, hasItem(equalTo(PlanNamedTypes.name(function.clazz())))); - } - } - // Tests that all names are unique - there should be a good reason if this is not the case. public void testUniqueNames() { var actual = PlanNamedTypes.namedTypeEntries().stream().map(PlanNameRegistry.Entry::name).distinct().toList(); @@ -210,32 +193,6 @@ public void testWrappedStreamSimple() throws IOException { assertThat(in.readVInt(), equalTo(11_345)); } - public void testBinComparisonSimple() throws IOException { - var orig = new Equals(Source.EMPTY, field("foo", DataType.DOUBLE), field("bar", DataType.DOUBLE)); - BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); - out.writeNamed(Expression.class, orig); - var deser = (Equals) planStreamInput(bso).readNamed(Expression.class); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); - } - - public void testBinComparison() { - Stream.generate(PlanNamedTypesTests::randomBinaryComparison).limit(100).forEach(obj -> assertNamedType(Expression.class, obj)); - } - - public void testArithmeticOperationSimple() throws IOException { - var orig = new Add(Source.EMPTY, field("foo", DataType.LONG), field("bar", DataType.LONG)); - BytesStreamOutput bso = new BytesStreamOutput(); - PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry, null); - out.writeNamed(Expression.class, orig); - var deser = (Add) planStreamInput(bso).readNamed(Expression.class); - EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); - } - - public void testArithmeticOperation() { - Stream.generate(PlanNamedTypesTests::randomArithmeticOperation).limit(100).forEach(obj -> assertNamedType(Expression.class, obj)); - } - public void testFieldSortSimple() throws IOException { var orig = new EsQueryExec.FieldSort(field("val", DataType.LONG), Order.OrderDirection.ASC, Order.NullsPosition.FIRST); BytesStreamOutput bso = new BytesStreamOutput(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java index 86baee58ca53f..bebfcd7f7bdbc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java @@ -95,7 +95,8 @@ protected final NamedWriteableRegistry getNamedWriteableRegistry() { List entries = new ArrayList<>(UnaryScalarFunction.getNamedWriteables()); entries.addAll(Attribute.getNamedWriteables()); entries.addAll(EsField.getNamedWriteables()); - entries.add(new NamedWriteableRegistry.Entry(MultiTypeEsField.class, "MultiTypeEsField", MultiTypeEsField::new)); + entries.add(MultiTypeEsField.ENTRY); + entries.addAll(Expression.getNamedWriteables()); return 
new NamedWriteableRegistry(entries);
     }
 
@@ -112,7 +113,7 @@ protected final MultiTypeEsField copyInstance(MultiTypeEsField instance, Transpo
             (out, v) -> new PlanStreamOutput(out, new PlanNameRegistry(), config).writeNamedWriteable(v),
             in -> {
                 PlanStreamInput pin = new PlanStreamInput(in, new PlanNameRegistry(), in.namedWriteableRegistry(), config);
-                return pin.readNamedWriteable(MultiTypeEsField.class);
+                return (MultiTypeEsField) pin.readNamedWriteable(EsField.class);
             },
             version
         );

From 81b84953884674e5be317ca91042499d7686ab12 Mon Sep 17 00:00:00 2001
From: Matt Culbreth
Date: Tue, 2 Jul 2024 16:58:57 -0400
Subject: [PATCH 132/216] Mark the Redact processor as Generally Available

---
 docs/changelog/110395.yaml                       | 9 +++++++++
 docs/reference/ingest/processors/redact.asciidoc | 2 --
 2 files changed, 9 insertions(+), 2 deletions(-)
 create mode 100644 docs/changelog/110395.yaml

diff --git a/docs/changelog/110395.yaml b/docs/changelog/110395.yaml
new file mode 100644
index 0000000000000..690be55abb5b2
--- /dev/null
+++ b/docs/changelog/110395.yaml
@@ -0,0 +1,9 @@
+pr: 110395
+summary: Mark the Redact processor as Generally Available
+area: Ingest Node
+type: feature
+issues: []
+highlight:
+  title: The Redact processor is Generally Available
+  body: The Redact processor uses the Grok rules engine to obscure text in the input document matching the given Grok patterns. The Redact processor was initially released as Technical Preview in `8.7.0`, and is now released as Generally Available.
+  notable: true

diff --git a/docs/reference/ingest/processors/redact.asciidoc b/docs/reference/ingest/processors/redact.asciidoc
index 2004e48c2ed78..6706106e92655 100644
--- a/docs/reference/ingest/processors/redact.asciidoc
+++ b/docs/reference/ingest/processors/redact.asciidoc
@@ -4,8 +4,6 @@
 <titleabbrev>Redact</titleabbrev>
 ++++
 
-experimental::[]
-
 The Redact processor uses the Grok rules engine to obscure text in the
 input document matching the given Grok patterns.
 The processor can be used to obscure Personal Identifying Information (PII) by configuring it to

From 248b045d7029c548d6c5aa149b728c314582c88b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?=
Date: Tue, 2 Jul 2024 23:30:18 +0200
Subject: [PATCH 133/216] [LTR] Do not add fields extracted using a query to
 the FieldValueFeatureExtractor (#109437)

---
 .../core/ml/inference/TrainedModelConfig.java |  32 +--
 .../trainedmodel/InferenceConfig.java         |  42 +++
 .../trainedmodel/LearningToRankConfig.java    |  25 ++
 .../MlLearningToRankRescorerIT.java           |  36 ++-
 .../ml/qa/ml-with-security/build.gradle      |   1 +
 .../integration/LearningToRankRescorerIT.java | 255 +++++++++---------
 .../ltr/LearningToRankRescorerContext.java    |  17 +-
 ...ningToRankRescorerBuilderRewriteTests.java |  49 +++-
 .../ltr/LearningToRankServiceTests.java       |   5 +-
 .../test/ml/learning_to_rank_rescorer.yml     |  99 ++++++-
 10 files changed, 384 insertions(+), 177 deletions(-)

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java
index 24fc24e43226b..f0909f75d9402 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/TrainedModelConfig.java
@@ -47,7 +47,6 @@
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.time.Instant;
-import java.util.Arrays;
 import java.util.Base64;
 import java.util.Collections;
 import java.util.HashMap;
@@ -236,7 +235,7 @@ public static TrainedModelConfig.Builder fromXContent(XContentParser parser, boo
         this.description = description;
         this.tags = Collections.unmodifiableList(ExceptionsHelper.requireNonNull(tags, TAGS));
         this.metadata = metadata == null ? null : Collections.unmodifiableMap(metadata);
-        this.input = ExceptionsHelper.requireNonNull(handleDefaultInput(input, modelType), INPUT);
+        this.input = ExceptionsHelper.requireNonNull(handleDefaultInput(input, inferenceConfig, modelType), INPUT);
         if (ExceptionsHelper.requireNonNull(modelSize, MODEL_SIZE_BYTES) < 0) {
             throw new IllegalArgumentException("[" + MODEL_SIZE_BYTES.getPreferredName() + "] must be greater than or equal to 0");
         }
@@ -256,11 +255,12 @@
         this.prefixStrings = prefixStrings;
     }
 
-    private static TrainedModelInput handleDefaultInput(TrainedModelInput input, TrainedModelType modelType) {
-        if (modelType == null) {
-            return input;
-        }
-        return input == null ? modelType.getDefaultInput() : input;
+    private static TrainedModelInput handleDefaultInput(
+        TrainedModelInput input,
+        InferenceConfig inferenceConfig,
+        TrainedModelType modelType
+    ) {
+        return input == null && inferenceConfig != null ? inferenceConfig.getDefaultInput(modelType) : input;
     }
 
     public TrainedModelConfig(StreamInput in) throws IOException {
@@ -963,20 +963,12 @@ public Builder validate(boolean forCreation) {
                     break;
                 }
             }
-            if (input != null && input.getFieldNames().isEmpty()) {
-                validationException = addValidationError("[input.field_names] must not be empty", validationException);
-            }
-            if (input != null
-                && input.getFieldNames()
-                    .stream()
-                    .filter(s -> s.contains("."))
-                    .flatMap(s -> Arrays.stream(Strings.delimitedListToStringArray(s, ".")))
-                    .anyMatch(String::isEmpty)) {
-                validationException = addValidationError(
-                    "[input.field_names] must only contain valid dot delimited field names",
-                    validationException
-                );
+
+            // Delegate input validation to the inference config.
+            if (inferenceConfig != null) {
+                validationException = inferenceConfig.validateTrainedModelInput(input, forCreation, validationException);
             }
+
             if (forCreation) {
                 validationException = checkIllegalSetting(version, VERSION.getPreferredName(), validationException);
                 validationException = checkIllegalSetting(createdBy, CREATED_BY.getPreferredName(), validationException);

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java
index 8733e456157d2..92e833c250873 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfig.java
@@ -8,12 +8,21 @@
 
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.TransportVersion;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
+import org.elasticsearch.core.Nullable;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xpack.core.ml.MlConfigVersion;
+import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput;
+import org.elasticsearch.xpack.core.ml.inference.TrainedModelType;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
 import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject;
 
+import java.util.Arrays;
+
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
 public interface InferenceConfig extends NamedXContentObject, VersionedNamedWriteable {
 
     String DEFAULT_TOP_CLASSES_RESULTS_FIELD = "top_classes";
@@ -65,6 +74,39 @@ default boolean supportsSearchRescorer() {
         return false;
     }
 
+    @Nullable
+    default TrainedModelInput getDefaultInput(TrainedModelType modelType) {
+        if (modelType == null) {
+            return null;
+        }
+        return modelType.getDefaultInput();
+    }
+
+    default ActionRequestValidationException validateTrainedModelInput(
+        TrainedModelInput input,
+        boolean forCreation,
+        ActionRequestValidationException validationException
+    ) {
+
+        if (input != null && input.getFieldNames().isEmpty()) {
+            validationException = addValidationError("[input.field_names] must not be empty", validationException);
+        }
+
+        if (input != null
+            && input.getFieldNames()
+                .stream()
+                .filter(s -> s.contains("."))
+                .flatMap(s -> Arrays.stream(Strings.delimitedListToStringArray(s, ".")))
+                .anyMatch(String::isEmpty)) {
+            validationException = addValidationError(
+                "[input.field_names] must only contain valid dot delimited field names",
+                validationException
+            );
+        }
+
+        return validationException;
return validationException; + } + default ElasticsearchStatusException incompatibleUpdateException(String updateName) { throw ExceptionsHelper.badRequestException( "Inference config of type [{}] can not be updated with a inference request of type [{}]", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java index 293769371999e..7d515c9509c41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,6 +18,8 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.MlConfigVersion; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.utils.NamedXContentObjectHelper; @@ -30,6 +33,8 @@ import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.action.ValidateActions.addValidationError; + public class LearningToRankConfig extends RegressionConfig implements Rewriteable { public static final ParseField NAME = new ParseField("learning_to_rank"); @@ -43,6 +48,8 @@ public class LearningToRankConfig extends RegressionConfig implements Rewriteabl private static final ObjectParser LENIENT_PARSER = createParser(true); private static final ObjectParser STRICT_PARSER = createParser(false); + private static final TrainedModelInput DEFAULT_INPUT = new TrainedModelInput(List.of()); + private static ObjectParser createParser(boolean lenient) { ObjectParser parser = new ObjectParser<>( NAME.getPreferredName(), @@ -237,6 +244,24 @@ public LearningToRankConfig rewrite(QueryRewriteContext ctx) throws IOException return this; } + @Override + public TrainedModelInput getDefaultInput(TrainedModelType modelType) { + return DEFAULT_INPUT; + } + + @Override + public ActionRequestValidationException validateTrainedModelInput( + TrainedModelInput input, + boolean forCreation, + ActionRequestValidationException validationException + ) { + if (forCreation && input != null && input.getFieldNames().isEmpty() == false) { + return addValidationError("cannot specify [input.field_names] for a model of type [learning_to_rank]", validationException); + } + + return validationException; + } + public static class Builder { private Integer numTopFeatureImportanceValues; private List learningToRankFeatureExtractorBuilders; diff --git a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java 
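Aside: the two checks that the default validateTrainedModelInput carries over from the old builder logic are easiest to see in isolation. A minimal stand-alone sketch, where String.split("\\.", -1) stands in for Elasticsearch's Strings.delimitedListToStringArray and the error message is returned directly instead of being accumulated on an ActionRequestValidationException:

import java.util.Arrays;
import java.util.List;

class FieldNameValidationSketch {
    // Mirrors the two default checks: field_names must be non-empty, and no
    // dot-delimited segment of a name may be blank ("product..type" is invalid).
    static String validate(List<String> fieldNames) {
        if (fieldNames.isEmpty()) {
            return "[input.field_names] must not be empty";
        }
        boolean blankSegment = fieldNames.stream()
            .filter(s -> s.contains("."))
            .flatMap(s -> Arrays.stream(s.split("\\.", -1)))
            .anyMatch(String::isEmpty);
        return blankSegment ? "[input.field_names] must only contain valid dot delimited field names" : null;
    }

    public static void main(String[] args) {
        System.out.println(validate(List.of("cost", "product.type"))); // null (valid)
        System.out.println(validate(List.of("product..type")));        // segment error
    }
}

Moving these checks behind InferenceConfig is what lets LearningToRankConfig replace them wholesale with the opposite rule above: for learning_to_rank, input.field_names may not be specified at all, since features now come exclusively from the configured extractors.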
index e5238c4aa44f0..6c6e1caf06584 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java +++ b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java @@ -31,10 +31,33 @@ public void setupModelAndData() throws IOException { putLearningToRankModel(MODEL_ID, """ { "description": "super complex model for tests", - "input": { "field_names": ["cost", "product"] }, "inference_config": { "learning_to_rank": { "feature_extractors": [ + { + "query_extractor": { + "feature_name": "cost", + "query": {"script_score": {"query": {"match_all":{}}, "script": {"source": "return doc['cost'].value;"}}} + } + }, + { + "query_extractor": { + "feature_name": "type_tv", + "query": {"constant_score": {"filter": {"term": { "product": "TV" }}, "boost": 1.0}} + } + }, + { + "query_extractor": { + "feature_name": "type_vcr", + "query": {"constant_score": {"filter": {"term": { "product": "VCR" }}, "boost": 1.0}} + } + }, + { + "query_extractor": { + "feature_name": "type_laptop", + "query": {"constant_score": {"filter": {"term": { "product": "Laptop" }}, "boost": 1.0}} + } + }, { "query_extractor": { "feature_name": "two", @@ -51,16 +74,6 @@ public void setupModelAndData() throws IOException { } }, "definition": { - "preprocessors" : [{ - "one_hot_encoding": { - "field": "product", - "hot_map": { - "TV": "type_tv", - "VCR": "type_vcr", - "Laptop": "type_laptop" - } - } - }], "trained_model": { "ensemble": { "feature_names": ["cost", "type_tv", "type_vcr", "type_laptop", "two", "product_bm25"], @@ -351,7 +364,6 @@ public void testModelCacheIsFlushedOnModelChange() throws IOException { deleteLearningToRankModel(MODEL_ID); putLearningToRankModel(MODEL_ID, """ { - "input": { "field_names": ["cost"] }, "inference_config": { "learning_to_rank": { "feature_extractors": [ diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index f2ec17093bb93..0869ae394d3de 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -181,6 +181,7 @@ tasks.named("yamlRestTest").configure { 'ml/inference_crud/Test put model model aliases with nlp model', 'ml/inference_processor/Test create processor with missing mandatory fields', 'ml/learning_to_rank_rescorer/Test rescore with missing model', + 'ml/learning_to_rank_rescorer/Test model input validation', 'ml/inference_stats_crud/Test get stats given missing trained model', 'ml/inference_stats_crud/Test get stats given expression without matches and allow_no_match is false', 'ml/jobs_crud/Test cannot create job with model snapshot id set', diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java index f17d5bf00297f..b2a0b60aed7ba 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java @@ -31,151 +31,166 @@ public void setupModelAndData() throws IOException { putRegressionModel(MODEL_ID, """ { "description": "super complex model for tests", - "input": {"field_names": ["cost", 
"product"]}, "inference_config": { "learning_to_rank": { "feature_extractors": [ { "query_extractor": { - "feature_name": "two", - "query": {"script_score": {"query": {"match_all":{}}, "script": {"source": "return 2.0;"}}} + "feature_name": "cost", + "query": {"script_score": {"query": {"match_all":{}}, "script": {"source": "return doc['cost'].value;"}}} } }, { "query_extractor": { - "feature_name": "product_bm25", - "query": {"term": {"product": "{{keyword}}"}} + "feature_name": "type_tv", + "query": {"constant_score": {"filter": {"term": { "product": "TV" }}, "boost": 1.0}} } + }, + { + "query_extractor": { + "feature_name": "type_vcr", + "query": {"constant_score": {"filter": {"term": { "product": "VCR" }}, "boost": 1.0}} + } + }, + { + "query_extractor": { + "feature_name": "type_laptop", + "query": {"constant_score": {"filter": {"term": { "product": "Laptop" }}, "boost": 1.0}} + } + }, + { + "query_extractor": { + "feature_name": "two", + "query": { "script_score": { "query": { "match_all": {} }, "script": { "source": "return 2.0;" } } } + } + }, + { + "query_extractor": { + "feature_name": "product_bm25", + "query": { "term": { "product": "{{keyword}}" } } + } } ] } }, "definition": { - "preprocessors" : [{ - "one_hot_encoding": { - "field": "product", - "hot_map": { - "TV": "type_tv", - "VCR": "type_vcr", - "Laptop": "type_laptop" - } - } - }], "trained_model": { "ensemble": { "feature_names": ["cost", "type_tv", "type_vcr", "type_laptop", "two", "product_bm25"], "target_type": "regression", "trained_models": [ - { - "tree": { - "feature_names": ["cost"], - "tree_structure": [ - { - "node_index": 0, - "split_feature": 0, - "split_gain": 12, - "threshold": 400, - "decision_type": "lte", - "default_left": true, - "left_child": 1, - "right_child": 2 - }, - { - "node_index": 1, - "leaf_value": 5.0 - }, - { - "node_index": 2, - "leaf_value": 2.0 - } - ], - "target_type": "regression" + { + "tree": { + "feature_names": [ + "cost" + ], + "tree_structure": [ + { + "node_index": 0, + "split_feature": 0, + "split_gain": 12, + "threshold": 400, + "decision_type": "lte", + "default_left": true, + "left_child": 1, + "right_child": 2 + }, + { + "node_index": 1, + "leaf_value": 5.0 + }, + { + "node_index": 2, + "leaf_value": 2.0 } - }, - { - "tree": { - "feature_names": [ - "type_tv" - ], - "tree_structure": [ - { - "node_index": 0, - "split_feature": 0, - "split_gain": 12, - "threshold": 1, - "decision_type": "lt", - "default_left": true, - "left_child": 1, - "right_child": 2 - }, - { - "node_index": 1, - "leaf_value": 1.0 - }, - { - "node_index": 2, - "leaf_value": 12.0 - } - ], - "target_type": "regression" + ], + "target_type": "regression" + } + }, + { + "tree": { + "feature_names": [ + "type_tv" + ], + "tree_structure": [ + { + "node_index": 0, + "split_feature": 0, + "split_gain": 12, + "threshold": 1, + "decision_type": "lt", + "default_left": true, + "left_child": 1, + "right_child": 2 + }, + { + "node_index": 1, + "leaf_value": 1.0 + }, + { + "node_index": 2, + "leaf_value": 12.0 } - }, - { - "tree": { - "feature_names": [ - "two" - ], - "tree_structure": [ - { - "node_index": 0, - "split_feature": 0, - "split_gain": 12, - "threshold": 1, - "decision_type": "lt", - "default_left": true, - "left_child": 1, - "right_child": 2 - }, - { - "node_index": 1, - "leaf_value": 1.0 - }, - { - "node_index": 2, - "leaf_value": 2.0 - } - ], - "target_type": "regression" + ], + "target_type": "regression" + } + }, + { + "tree": { + "feature_names": [ + "two" + ], + "tree_structure": [ + { + 
"node_index": 0, + "split_feature": 0, + "split_gain": 12, + "threshold": 1, + "decision_type": "lt", + "default_left": true, + "left_child": 1, + "right_child": 2 + }, + { + "node_index": 1, + "leaf_value": 1.0 + }, + { + "node_index": 2, + "leaf_value": 2.0 } - }, - { - "tree": { - "feature_names": [ - "product_bm25" - ], - "tree_structure": [ - { - "node_index": 0, - "split_feature": 0, - "split_gain": 12, - "threshold": 1, - "decision_type": "lt", - "default_left": true, - "left_child": 1, - "right_child": 2 - }, - { - "node_index": 1, - "leaf_value": 1.0 - }, - { - "node_index": 2, - "leaf_value": 4.0 - } - ], - "target_type": "regression" + ], + "target_type": "regression" + } + }, + { + "tree": { + "feature_names": [ + "product_bm25" + ], + "tree_structure": [ + { + "node_index": 0, + "split_feature": 0, + "split_gain": 12, + "threshold": 1, + "decision_type": "lt", + "default_left": true, + "left_child": 1, + "right_child": 2 + }, + { + "node_index": 1, + "leaf_value": 1.0 + }, + { + "node_index": 2, + "leaf_value": 4.0 } + ], + "target_type": "regression" } + } ] } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java index b1df3a2da7c42..e03370b415417 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java @@ -24,6 +24,8 @@ import java.util.ArrayList; import java.util.List; +import static java.util.function.Predicate.not; + public class LearningToRankRescorerContext extends RescoreContext { final SearchExecutionContext executionContext; @@ -52,12 +54,9 @@ public LearningToRankRescorerContext( List buildFeatureExtractors(IndexSearcher searcher) throws IOException { assert this.regressionModelDefinition != null && this.learningToRankConfig != null; + List featureExtractors = new ArrayList<>(); - if (this.regressionModelDefinition.inputFields().isEmpty() == false) { - featureExtractors.add( - new FieldValueFeatureExtractor(new ArrayList<>(this.regressionModelDefinition.inputFields()), this.executionContext) - ); - } + List weights = new ArrayList<>(); List queryFeatureNames = new ArrayList<>(); for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : learningToRankConfig.getFeatureExtractorBuilders()) { @@ -72,6 +71,14 @@ List buildFeatureExtractors(IndexSearcher searcher) throws IOE featureExtractors.add(new QueryFeatureExtractor(queryFeatureNames, weights)); } + List fieldValueExtractorFields = this.regressionModelDefinition.inputFields() + .stream() + .filter(not(queryFeatureNames::contains)) + .toList(); + if (fieldValueExtractorFields.isEmpty() == false) { + featureExtractors.add(new FieldValueFeatureExtractor(fieldValueExtractorFields, this.executionContext)); + } + return featureExtractors; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java index 3bfe8aa390d8b..7266b165504d4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java @@ -41,6 +41,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.in; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -193,8 +194,7 @@ private static LearningToRankService learningToRankServiceMock() { public void testBuildContext() throws Exception { LocalModel localModel = mock(LocalModel.class); - List inputFields = List.of(DOUBLE_FIELD_NAME, INT_FIELD_NAME); - when(localModel.inputFields()).thenReturn(inputFields); + when(localModel.inputFields()).thenReturn(GOOD_MODEL_CONFIG.getInput().getFieldNames()); IndexSearcher searcher = mock(IndexSearcher.class); doAnswer(invocation -> invocation.getArgument(0)).when(searcher).rewrite(any(Query.class)); @@ -211,11 +211,48 @@ public void testBuildContext() throws Exception { assertNotNull(rescoreContext); assertThat(rescoreContext.getWindowSize(), equalTo(20)); List featureExtractors = rescoreContext.buildFeatureExtractors(context.searcher()); - assertThat(featureExtractors, hasSize(2)); - assertThat( - featureExtractors.stream().flatMap(featureExtractor -> featureExtractor.featureNames().stream()).toList(), - containsInAnyOrder("feature_1", "feature_2", DOUBLE_FIELD_NAME, INT_FIELD_NAME) + assertThat(featureExtractors, hasSize(1)); + + FeatureExtractor queryExtractor = featureExtractors.get(0); + assertThat(queryExtractor, instanceOf(QueryFeatureExtractor.class)); + assertThat(queryExtractor.featureNames(), hasSize(2)); + assertThat(queryExtractor.featureNames(), containsInAnyOrder("feature_1", "feature_2")); + } + + public void testLegacyFieldValueExtractorBuildContext() throws Exception { + // Models created before 8.15 have been saved with input fields. + // We check field value extractors are created and the deduplication is done correctly. 
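The deduplication that comment refers to is the Predicate.not filter added to buildFeatureExtractors above. The same filtering step in isolation, using this test's field names:

import java.util.List;

import static java.util.function.Predicate.not;

class LegacyInputDedupSketch {
    public static void main(String[] args) {
        // "feature_1" is already produced by a query extractor, so only the
        // remaining legacy input fields should get a field-value extractor.
        List<String> inputFields = List.of("feature_1", "field_1", "field_2");
        List<String> queryFeatureNames = List.of("feature_1", "feature_2");

        List<String> fieldValueExtractorFields = inputFields.stream()
            .filter(not(queryFeatureNames::contains))
            .toList();

        System.out.println(fieldValueExtractorFields); // [field_1, field_2]
    }
}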
+ LocalModel localModel = mock(LocalModel.class); + when(localModel.inputFields()).thenReturn(List.of("feature_1", "field_1", "field_2")); + + IndexSearcher searcher = mock(IndexSearcher.class); + doAnswer(invocation -> invocation.getArgument(0)).when(searcher).rewrite(any(Query.class)); + SearchExecutionContext context = createSearchExecutionContext(searcher); + + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( + localModel, + (LearningToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), + null, + mock(LearningToRankService.class) ); + + LearningToRankRescorerContext rescoreContext = rescorerBuilder.innerBuildContext(20, context); + assertNotNull(rescoreContext); + assertThat(rescoreContext.getWindowSize(), equalTo(20)); + List featureExtractors = rescoreContext.buildFeatureExtractors(context.searcher()); + + assertThat(featureExtractors, hasSize(2)); + + FeatureExtractor queryExtractor = featureExtractors.stream().filter(fe -> fe instanceof QueryFeatureExtractor).findFirst().get(); + assertThat(queryExtractor.featureNames(), hasSize(2)); + assertThat(queryExtractor.featureNames(), containsInAnyOrder("feature_1", "feature_2")); + + FeatureExtractor fieldValueExtractor = featureExtractors.stream() + .filter(fe -> fe instanceof FieldValueFeatureExtractor) + .findFirst() + .get(); + assertThat(fieldValueExtractor.featureNames(), hasSize(2)); + assertThat(fieldValueExtractor.featureNames(), containsInAnyOrder("field_1", "field_2")); } private LearningToRankRescorerBuilder rewriteAndFetch( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java index 026dcca4bfcf7..6ca9ae4296789 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java @@ -50,11 +50,10 @@ import static org.mockito.Mockito.verify; public class LearningToRankServiceTests extends ESTestCase { - public static final String GOOD_MODEL = "inferenceEntityId"; - public static final String BAD_MODEL = "badModel"; + public static final String GOOD_MODEL = "inference-entity-id"; + public static final String BAD_MODEL = "bad-model"; public static final TrainedModelConfig GOOD_MODEL_CONFIG = TrainedModelConfig.builder() .setModelId(GOOD_MODEL) - .setInput(new TrainedModelInput(List.of("field1", "field2"))) .setEstimatedOperations(1) .setModelSize(2) .setModelType(TrainedModelType.TREE_ENSEMBLE) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml index dac7b48617a2f..5c0096e9666fc 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml @@ -9,22 +9,37 @@ setup: body: > { "description": "super complex model for tests", - "input": {"field_names": ["cost", "product"]}, "inference_config": { "learning_to_rank": { + "feature_extractors": [ + { + "query_extractor": { + "feature_name": "cost", + "query": {"script_score": {"query": {"match_all":{}}, "script": {"source": "return doc['cost'].value;"}}} + } + }, + { + "query_extractor": { + "feature_name": "type_tv", + "query": {"term": 
{"product": "TV"}} + } + }, + { + "query_extractor": { + "feature_name": "type_vcr", + "query": {"term": {"product": "VCR"}} + } + }, + { + "query_extractor": { + "feature_name": "type_laptop", + "query": {"term": {"product": "Laptop"}} + } + } + ] } }, "definition": { - "preprocessors" : [{ - "one_hot_encoding": { - "field": "product", - "hot_map": { - "TV": "type_tv", - "VCR": "type_vcr", - "Laptop": "type_laptop" - } - } - }], "trained_model": { "ensemble": { "feature_names": ["cost", "type_tv", "type_vcr", "type_laptop"], @@ -246,3 +261,65 @@ setup: } } - length: { hits.hits: 0 } +--- +"Test model input validation": + - skip: + features: headers + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + catch: bad_request + ml.put_trained_model: + model_id: bad-model + body: > + { + "description": "a bad model", + "input": { + "field_names": ["cost"] + }, + "inference_config": { + "learning_to_rank": { } + }, + "definition": { + "trained_model": { + "ensemble": { + "feature_names": ["cost"], + "target_type": "regression", + "trained_models": [ + { + "tree": { + "feature_names": [ + "cost" + ], + "tree_structure": [ + { + "node_index": 0, + "split_feature": 0, + "split_gain": 12, + "threshold": 400, + "decision_type": "lte", + "default_left": true, + "left_child": 1, + "right_child": 2 + }, + { + "node_index": 1, + "leaf_value": 5.0 + }, + { + "node_index": 2, + "leaf_value": 2.0 + } + ], + "target_type": "regression" + } + } + ] + } + } + } + } + + - match: { status: 400 } + - match: { error.root_cause.0.type: "action_request_validation_exception" } + - match: { error.root_cause.0.reason: "Validation Failed: 1: cannot specify [input.field_names] for a model of type [learning_to_rank];" } From 8abc8857f2d070f1afea091b778ad64148077eba Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Tue, 2 Jul 2024 18:29:02 -0400 Subject: [PATCH 134/216] [ES|QL] weighted_avg (#109993) * weighted_avg --- docs/changelog/109993.yaml | 5 + .../functions/aggregation-functions.asciidoc | 2 + .../esql/functions/weighted-avg.asciidoc | 35 +++++ .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/stats.csv-spec | 122 +++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../function/EsqlFunctionRegistry.java | 4 +- .../function/aggregate/AggregateFunction.java | 3 +- .../function/aggregate/WeightedAvg.java | 145 ++++++++++++++++++ .../xpack/esql/analysis/VerifierTests.java | 31 ++++ 10 files changed, 356 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/109993.yaml create mode 100644 docs/reference/esql/functions/weighted-avg.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java diff --git a/docs/changelog/109993.yaml b/docs/changelog/109993.yaml new file mode 100644 index 0000000000000..40d161b6b5c24 --- /dev/null +++ b/docs/changelog/109993.yaml @@ -0,0 +1,5 @@ +pr: 109993 +summary: "[ES|QL] `weighted_avg`" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index b23416c0e20bf..11fcd576d336e 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -20,6 +20,7 @@ The <> command supports these aggregate functions: * <> * <> * <> +* 
experimental:[] <> // end::agg_list[] include::avg.asciidoc[] @@ -34,3 +35,4 @@ include::st_centroid_agg.asciidoc[] include::sum.asciidoc[] include::layout/top.asciidoc[] include::values.asciidoc[] +include::weighted-avg.asciidoc[] diff --git a/docs/reference/esql/functions/weighted-avg.asciidoc b/docs/reference/esql/functions/weighted-avg.asciidoc new file mode 100644 index 0000000000000..4f166801641df --- /dev/null +++ b/docs/reference/esql/functions/weighted-avg.asciidoc @@ -0,0 +1,35 @@ +[discrete] +[[esql-agg-weighted-avg]] +=== `WEIGHTED_AVG` + +*Syntax* + +[source,esql] +---- +WEIGHTED_AVG(expression, weight) +---- + +`expression`:: +Numeric expression. + +`weight`:: +Numeric weight. + +*Description* + +The weighted average of a numeric expression. + +*Supported types* + +The result is always a `double` no matter the input type. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=weighted-avg] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=weighted-avg-result] +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index cf7b1b99843a2..925b2fb9e5533 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -113,6 +113,7 @@ double tau() "double|integer|long|date top(field:double|integer|long|date, limit:integer, order:keyword)" "keyword|text trim(string:keyword|text)" "boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" +"double weighted_avg(number:double|integer|long, weight:double|integer|long)" ; metaFunctionsArgs#[skip:-8.14.99] @@ -232,6 +233,7 @@ to_version |field |"keyword|text|version" top |[field, limit, order] |["double|integer|long|date", integer, keyword] |[The field to collect the top values for.,The maximum number of values to collect.,The order to calculate the top values. Either `asc` or `desc`.] trim |string |"keyword|text" |String expression. If `null`, the function returns `null`. values |field |"boolean|date|double|integer|ip|keyword|long|text|version" |[""] +weighted_avg |[number, weight] |["double|integer|long", "double|integer|long"] |[A numeric value., A numeric weight.] ; metaFunctionsDescription#[skip:-8.14.99] @@ -352,6 +354,7 @@ to_version |Converts an input string to a version value. top |Collects the top values for a field. Includes repeated values. trim |Removes leading and trailing whitespaces from a string. values |Collect values for a field. +weighted_avg |The weighted average of a numeric field. 
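For reference, the semantics behind that one-line description, and behind the surrogate rewrite in WeightedAvg.surrogate() further down, which expands the aggregate to sum(v * w) / sum(w), can be sketched in plain Java:

class WeightedAvgSketch {
    // weighted_avg(v, w) == sum(v * w) / sum(w)
    static double weightedAvg(double[] values, double[] weights) {
        double weightedSum = 0.0;
        double weightSum = 0.0;
        for (int i = 0; i < values.length; i++) {
            weightedSum += values[i] * weights[i];
            weightSum += weights[i];
        }
        // With all-zero weights this is 0.0/0.0 = NaN in Java; ES|QL's Div
        // instead fails with "/ by zero", as the stats.csv-spec tests expect.
        return weightedSum / weightSum;
    }

    public static void main(String[] args) {
        System.out.println(weightedAvg(new double[] { 1, 2, 3 }, new double[] { 1, 1, 1 })); // 2.0
        System.out.println(weightedAvg(new double[] { 1, 2, 3 }, new double[] { 0, 0, 1 })); // 3.0
    }
}

With a foldable weight the surrogate instead rewrites to sum(v) / count(v), a plain average (a constant weight cancels out), which is why the stats.csv-spec tests assert that w_avg_1 with weight 1 equals avg.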
; metaFunctionsRemaining#[skip:-8.14.99] @@ -473,6 +476,7 @@ to_version |version top |"double|integer|long|date" |[false, false, false] |false |true trim |"keyword|text" |false |false |false values |"boolean|date|double|integer|ip|keyword|long|text|version" |false |false |true +weighted_avg |"double" |[false, false] |false |true ; metaFunctionsFiltered#[skip:-8.14.99] @@ -491,5 +495,5 @@ countFunctions#[skip:-8.14.99, reason:BIN added] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -111 | 111 | 111 +112 | 112 | 112 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index bbc98ece3890a..e4fc0580e4ba2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1697,3 +1697,125 @@ FROM employees | STATS min = min(salary) by languages | SORT min + CASE(language 29175 |2 28336 |null ; + + +weightedAvg +required_capability: agg_weighted_avg +from employees +| stats w_avg_1 = weighted_avg(salary, 1), avg = avg(salary), w_avg_2 = weighted_avg(salary, height) +| EVAL w_avg_1 = ROUND(w_avg_1), avg = ROUND(avg), w_avg_2 = ROUND(w_avg_2) +; + +w_avg_1:double | avg:double | w_avg_2:double +48249.0 | 48249.0 | 48474.0 +; + +weightedAvgGrouping +required_capability: agg_weighted_avg +// tag::weighted-avg[] +FROM employees +| STATS w_avg = WEIGHTED_AVG(salary, height) by languages +| EVAL w_avg = ROUND(w_avg) +| KEEP w_avg, languages +| SORT languages +// end::weighted-avg[] +; + +// tag::weighted-avg-result[] +w_avg:double | languages:integer +51464.0 | 1 +48477.0 | 2 +52379.0 | 3 +47990.0 | 4 +42119.0 | 5 +52142.0 | null +// end::weighted-avg-result[] +; + +weightedAvgConstant +required_capability: agg_weighted_avg +row v = [1, 2, 3] +| stats w_avg_1 = weighted_avg(v, 1), w_avg_2 = weighted_avg([1, 2, 3], 1), avg = avg(v) +| EVAL w_avg_1 = ROUND(w_avg_1), w_avg_2 = ROUND(w_avg_2), avg = ROUND(avg) +; + +w_avg_1:double |w_avg_2:double |avg:double +2.0 | 2.0 | 2.0 +; + +weightedAvgBothConstantsMvWarning +required_capability: agg_weighted_avg +row v = [1, 2, 3], w = [1, 2, 3] +| stats w_avg = weighted_avg(v, w) +; +warning:Line 2:17: evaluation of [weighted_avg(v, w)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:17: java.lang.IllegalArgumentException: single-value function encountered multi-value + +w_avg:double +null +; + +weightedAvgWeightConstantMvWarning +required_capability: agg_weighted_avg +from employees +| eval w = [1, 2, 3] +| stats w_avg = weighted_avg(salary, w) +; +warning:Line 3:17: evaluation of [weighted_avg(salary, w)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:17: java.lang.IllegalArgumentException: single-value function encountered multi-value + +w_avg:double +null +; + +weightedAvgWeightMvWarning +required_capability: agg_weighted_avg +from employees +| where emp_no == 10002 or emp_no == 10003 +| stats w_avg = weighted_avg(salary, salary_change.int) +; +warning:Line 3:17: evaluation of [weighted_avg(salary, salary_change.int)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 3:17: java.lang.IllegalArgumentException: single-value function encountered multi-value + +w_avg:double +null +; + +weightedAvgFieldMvWarning +required_capability: agg_weighted_avg +from employees +| where emp_no == 10002 or emp_no == 10003 +| stats w_avg = weighted_avg(salary_change.int, height) +; +warning:Line 3:17: evaluation of [weighted_avg(salary_change.int, height)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:17: java.lang.IllegalArgumentException: single-value function encountered multi-value + +w_avg:double +null +; + +weightedAvgWeightZero +required_capability: agg_weighted_avg +from employees +| eval w = 0 +| stats w_avg = weighted_avg(salary, w) +; +warning:Line 3:17: evaluation of [weighted_avg(salary, w)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:17: java.lang.ArithmeticException: / by zero + +w_avg:double +null +; + +weightedAvgWeightZeroExp +required_capability: agg_weighted_avg +from employees +| eval w = 0 + 0 +| stats w_avg = weighted_avg(salary, w) +; +warning:Line 3:17: evaluation of [weighted_avg(salary, w)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:17: java.lang.ArithmeticException: / by zero + +w_avg:double +null +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 7f4f1c070f999..07362311d37a5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -101,7 +101,12 @@ public enum Cap { /** * Support for quoting index sources in double quotes. */ - DOUBLE_QUOTES_SOURCE_ENCLOSING; + DOUBLE_QUOTES_SOURCE_ENCLOSING, + + /** + * Support for WEIGHTED_AVG function. 
+ */ + AGG_WEIGHTED_AVG; private final boolean snapshotOnly; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 643fd2622a6a8..d65dc1d6b397f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.aggregate.Top; import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; +import org.elasticsearch.xpack.esql.expression.function.aggregate.WeightedAvg; import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; @@ -199,7 +200,8 @@ private FunctionDefinition[][] functions() { def(Percentile.class, Percentile::new, "percentile"), def(Sum.class, Sum::new, "sum"), def(Top.class, Top::new, "top"), - def(Values.class, Values::new, "values") }, + def(Values.class, Values::new, "values"), + def(WeightedAvg.class, WeightedAvg::new, "weighted_avg") }, // math new FunctionDefinition[] { def(Abs.class, Abs::new, "abs"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index 38d86083df74b..f0acac0e9744e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -45,7 +45,8 @@ public static List getNamedWriteables() { Values.ENTRY, // internal functions ToPartial.ENTRY, - FromPartial.ENTRY + FromPartial.ENTRY, + WeightedAvg.ENTRY ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java new file mode 100644 index 0000000000000..75315c48b3f45 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/WeightedAvg.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; + +public class WeightedAvg extends AggregateFunction implements SurrogateExpression, Validatable { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "WeightedAvg", + WeightedAvg::new + ); + + private final Expression weight; + + private static final String invalidWeightError = "{} argument of [{}] cannot be null or 0, received [{}]"; + + @FunctionInfo(returnType = "double", description = "The weighted average of a numeric field.", isAggregation = true) + public WeightedAvg( + Source source, + @Param(name = "number", type = { "double", "integer", "long" }, description = "A numeric value.") Expression field, + @Param(name = "weight", type = { "double", "integer", "long" }, description = "A numeric weight.") Expression weight + ) { + super(source, field, List.of(weight)); + this.weight = weight; + } + + private WeightedAvg(StreamInput in) throws IOException { + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + List fields = children(); + assert fields.size() == 2; + out.writeNamedWriteable(fields.get(0)); + out.writeNamedWriteable(fields.get(1)); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + protected Expression.TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + TypeResolution resolution = isType( + field(), + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, + sourceText(), + FIRST, + "numeric except unsigned_long or counter types" + ); + + if (resolution.unresolved()) { + return resolution; + } + + resolution = isType( + weight(), + dt -> dt.isNumeric() && dt != DataType.UNSIGNED_LONG, + sourceText(), + SECOND, + "numeric except unsigned_long or counter types" + ); + + if (resolution.unresolved()) { + return resolution; + } + + if (weight.dataType() == DataType.NULL 
+ || (weight.foldable() && (weight.fold() == null || weight.fold().equals(0) || weight.fold().equals(0.0)))) { + return new TypeResolution(format(null, invalidWeightError, SECOND, sourceText(), weight.foldable() ? weight.fold() : null)); + } + + return TypeResolution.TYPE_RESOLVED; + } + + @Override + public DataType dataType() { + return DataType.DOUBLE; + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, WeightedAvg::new, field(), weight); + } + + @Override + public WeightedAvg replaceChildren(List newChildren) { + return new WeightedAvg(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + public Expression surrogate() { + var s = source(); + var field = field(); + var weight = weight(); + + if (field.foldable()) { + return new MvAvg(s, field); + } + if (weight.foldable()) { + return new Div(s, new Sum(s, field), new Count(s, field), dataType()); + } else { + return new Div(s, new Sum(s, new Mul(s, field, weight)), new Sum(s, weight), dataType()); + } + } + + public Expression weight() { + return weight; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index beb85268425be..ad08130c5b0d9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -595,6 +595,37 @@ public void testRateNotEnclosedInAggregate() { and inside another aggregate""")); } + public void testWeightedAvg() { + assertEquals( + "1:35: SECOND argument of [weighted_avg(v, null)] cannot be null or 0, received [null]", + error("row v = [1, 2, 3] | stats w_avg = weighted_avg(v, null)") + ); + assertEquals( + "1:27: SECOND argument of [weighted_avg(salary, null)] cannot be null or 0, received [null]", + error("from test | stats w_avg = weighted_avg(salary, null)") + ); + assertEquals( + "1:45: SECOND argument of [weighted_avg(v, w)] cannot be null or 0, received [null]", + error("row v = [1, 2, 3], w = null | stats w_avg = weighted_avg(v, w)") + ); + assertEquals( + "1:44: SECOND argument of [weighted_avg(salary, w)] cannot be null or 0, received [null]", + error("from test | eval w = null | stats w_avg = weighted_avg(salary, w)") + ); + assertEquals( + "1:51: SECOND argument of [weighted_avg(salary, w)] cannot be null or 0, received [null]", + error("from test | eval w = null + null | stats w_avg = weighted_avg(salary, w)") + ); + assertEquals( + "1:35: SECOND argument of [weighted_avg(v, 0)] cannot be null or 0, received [0]", + error("row v = [1, 2, 3] | stats w_avg = weighted_avg(v, 0)") + ); + assertEquals( + "1:27: SECOND argument of [weighted_avg(salary, 0.0)] cannot be null or 0, received [0.0]", + error("from test | stats w_avg = weighted_avg(salary, 0.0)") + ); + } + private String error(String query) { return error(query, defaultAnalyzer); } From 741dce20ced87d2e34eb332dc5950749b8e39293 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 3 Jul 2024 10:30:05 +1000 Subject: [PATCH 135/216] Add 8.15 to branches.json --- branches.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/branches.json b/branches.json index 2794b545facc6..b852cd1fa5dbd 100644 --- a/branches.json +++ b/branches.json @@ -4,6 +4,9 @@ { "branch": "main" }, + { + "branch": "8.15" + }, { "branch": "8.14" }, From 
bc1b77ff111e1d1c1c19b0be0d0f1ef0c0454b3f Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 2 Jul 2024 18:39:54 -0700 Subject: [PATCH 136/216] Fix serialization of to_partial and from_partial (#110403) #110157 broke the serialization of ToPartial and FromPartial, where the parameters weren't properly assigned. I will look into making these classes more compatible with AggregateFunction, but this small change should stabilize the CI. Relates #110157 --- .../esql/expression/function/aggregate/FromPartial.java | 7 ++++--- .../esql/expression/function/aggregate/ToPartial.java | 7 ++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java index e16b872f654e1..593e6fa463371 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/FromPartial.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; @@ -48,13 +49,13 @@ public FromPartial(Source source, Expression field, Expression function) { } private FromPartial(StreamInput in) throws IOException { - super(in); - this.function = in.readNamedWriteable(Expression.class); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); + source().writeTo(out); + out.writeNamedWriteable(field()); out.writeNamedWriteable(function); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java index e03574f9cb094..f94c8e0508cd7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.planner.ToAggregator; import java.io.IOException; @@ -75,13 +76,13 @@ private ToPartial(Source source, Expression field, Expression function) { } private ToPartial(StreamInput in) throws IOException { - super(in); - this.function = in.readNamedWriteable(Expression.class); + this(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(Expression.class), in.readNamedWriteable(Expression.class)); } @Override public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); + source().writeTo(out); + out.writeNamedWriteable(field()); out.writeNamedWriteable(function); } From 0f77b5b9417bb7f81722660fbd0852d3aac737d8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 3 Jul 2024 05:05:58 +0100 Subject: [PATCH 137/216] 
Avoid routing loops in `TransportMasterNodeAction` (#108909) Closes #102351 Co-authored-by: Nick Tindall --- .../master/TransportMasterNodeActionIT.java | 279 ++++++++++++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../support/master/MasterNodeRequest.java | 37 ++- .../TermOverridingMasterNodeRequest.java | 128 ++++++++ .../master/TermOverridingStreamOutput.java | 60 ++++ .../master/TransportMasterNodeAction.java | 9 +- .../cluster/node/tasks/TestTaskPlugin.java | 16 +- .../TransportMasterNodeActionTests.java | 8 +- .../master/MasterNodeRequestHelper.java | 19 ++ .../test/transport/MockTransportService.java | 4 +- 10 files changed, 547 insertions(+), 14 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/action/support/master/TransportMasterNodeActionIT.java create mode 100644 server/src/main/java/org/elasticsearch/action/support/master/TermOverridingMasterNodeRequest.java create mode 100644 server/src/main/java/org/elasticsearch/action/support/master/TermOverridingStreamOutput.java create mode 100644 test/framework/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequestHelper.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/TransportMasterNodeActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/TransportMasterNodeActionIT.java new file mode 100644 index 0000000000000..7211585d766f4 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/TransportMasterNodeActionIT.java @@ -0,0 +1,279 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.support.master; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateApplier; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.coordination.LeaderChecker; +import org.elasticsearch.cluster.coordination.PublicationTransportHandler; +import org.elasticsearch.cluster.coordination.StatefulPreVoteCollector; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class TransportMasterNodeActionIT extends ESIntegTestCase { + + @SuppressWarnings("unchecked") + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopyNoNullElements( + super.nodePlugins(), + MockTransportService.TestPlugin.class, + TestActionPlugin.class + ); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + // detect leader failover quickly + .put(LeaderChecker.LEADER_CHECK_RETRY_COUNT_SETTING.getKey(), 1) + .put(LeaderChecker.LEADER_CHECK_INTERVAL_SETTING.getKey(), "100ms") + .build(); + } + + public void testRoutingLoopProtection() { + + final var cleanupTasks = new ArrayList(); + + try { + final var newMaster = ensureSufficientMasterEligibleNodes(); + final long originalTerm = internalCluster().masterClient().admin().cluster().prepareState().get().getState().term(); + final var previousMasterKnowsNewMasterIsElectedLatch = configureElectionLatch(newMaster, cleanupTasks); + + final var newMasterReceivedReroutedMessageFuture = new PlainActionFuture<>(); + final var newMasterReceivedReroutedMessageListener = ActionListener.assertOnce(newMasterReceivedReroutedMessageFuture); + final var reroutedMessageReceived = ActionListener.assertOnce(ActionListener.noop()); + for (final var transportService : internalCluster().getInstances(TransportService.class)) { + final var 
mockTransportService = asInstanceOf(MockTransportService.class, transportService); + cleanupTasks.add(mockTransportService::clearAllRules); + + if (mockTransportService.getLocalNode().getName().equals(newMaster)) { + // Complete listener when the new master receives the re-routed message, ensure it only receives it once, and only from + // a node in the newMaster term. + mockTransportService.addRequestHandlingBehavior(TEST_ACTION_TYPE.name(), (handler, request, channel, task) -> { + assertThat(asInstanceOf(MasterNodeRequest.class, request).masterTerm(), greaterThan(originalTerm)); + newMasterReceivedReroutedMessageListener.onResponse(null); + handler.messageReceived(request, channel, task); + }); + } else { + // Disable every other node's ability to send pre-vote and publish requests + mockTransportService.addSendBehavior((connection, requestId, action, request, options) -> { + if (action.equals(StatefulPreVoteCollector.REQUEST_PRE_VOTE_ACTION_NAME) + || action.equals(PublicationTransportHandler.PUBLISH_STATE_ACTION_NAME)) { + throw new ElasticsearchException("[{}] for [{}] denied", action, connection.getNode()); + } else { + connection.sendRequest(requestId, action, request, options); + } + }); + + // Assert that no other node receives the re-routed message more than once, and only from a node in the original term. + mockTransportService.addRequestHandlingBehavior(TEST_ACTION_TYPE.name(), (handler, request, channel, task) -> { + assertThat(asInstanceOf(MasterNodeRequest.class, request).masterTerm(), equalTo(originalTerm)); + reroutedMessageReceived.onResponse(null); + handler.messageReceived(request, channel, task); + }); + } + } + + final var newMasterStateApplierBlock = blockClusterStateApplier(newMaster, cleanupTasks); + + // trigger a cluster state update, which fails, causing a master failover + internalCluster().getCurrentMasterNodeInstance(ClusterService.class) + .submitUnbatchedStateUpdateTask("failover", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + return ClusterState.builder(currentState).build(); + } + + @Override + public void onFailure(Exception e) { + // expected + } + }); + + // Wait until the old master has acknowledged the new master's election + safeAwait(previousMasterKnowsNewMasterIsElectedLatch); + logger.info("New master is elected"); + + // perform a TransportMasterNodeAction on the new master, which doesn't know it's the master yet + final var testActionFuture = client(newMaster).execute(TEST_ACTION_TYPE, new TestRequest()); + + // wait for the request to come back to the new master + safeGet(newMasterReceivedReroutedMessageFuture); + + // Unblock state application on new master, allow it to know of its election win + safeAwait(newMasterStateApplierBlock); + + safeGet(testActionFuture); + } finally { + Releasables.closeExpectNoException(Releasables.wrap(cleanupTasks)); + } + } + + /** + * Block the cluster state applier on a node. Returns only when applier is blocked. 
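The rendezvous described in that sentence is a two-phase CyclicBarrier handshake; a minimal stand-alone model of the same pattern (the real helper returns the barrier so the test can release the applier later):

import java.util.concurrent.CyclicBarrier;

class ApplierGateSketch {
    static final CyclicBarrier BARRIER = new CyclicBarrier(2);

    public static void main(String[] args) throws Exception {
        Thread applier = new Thread(() -> {
            try {
                BARRIER.await(); // 1st rendezvous: signal "I am now blocked"
                BARRIER.await(); // 2nd rendezvous: wait to be released
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
        applier.start();
        BARRIER.await(); // returns only once the applier is parked
        // ... run assertions while state application is blocked ...
        BARRIER.await(); // release the applier
        applier.join();
    }
}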
+ * + * @param nodeName The name of the node on which to block the applier + * @param cleanupTasks The list of clean up tasks + * @return A cyclic barrier which when awaited on will un-block the applier + */ + private static CyclicBarrier blockClusterStateApplier(String nodeName, ArrayList cleanupTasks) { + final var stateApplierBarrier = new CyclicBarrier(2); + internalCluster().getInstance(ClusterService.class, nodeName).getClusterApplierService().onNewClusterState("test", () -> { + // Meet to signify application is blocked + safeAwait(stateApplierBarrier); + // Wait for the signal to unblock + safeAwait(stateApplierBarrier); + return null; + }, ActionListener.noop()); + cleanupTasks.add(stateApplierBarrier::reset); + + // Wait until state application is blocked + safeAwait(stateApplierBarrier); + return stateApplierBarrier; + } + + /** + * Configure a latch that will be released when the existing master knows of the new master's election + * + * @param newMaster The name of the newMaster node + * @param cleanupTasks The list of cleanup tasks + * @return A latch that will be released when the old master acknowledges the new master's election + */ + private CountDownLatch configureElectionLatch(String newMaster, List cleanupTasks) { + final String originalMasterName = internalCluster().getMasterName(); + logger.info("Original master was {}, new master will be {}", originalMasterName, newMaster); + final var previousMasterKnowsNewMasterIsElectedLatch = new CountDownLatch(1); + ClusterStateApplier newMasterMonitor = event -> { + DiscoveryNode masterNode = event.state().nodes().getMasterNode(); + if (masterNode != null && masterNode.getName().equals(newMaster)) { + previousMasterKnowsNewMasterIsElectedLatch.countDown(); + } + }; + ClusterService originalMasterClusterService = internalCluster().getInstance(ClusterService.class, originalMasterName); + originalMasterClusterService.addStateApplier(newMasterMonitor); + cleanupTasks.add(() -> originalMasterClusterService.removeApplier(newMasterMonitor)); + return previousMasterKnowsNewMasterIsElectedLatch; + } + + /** + * Add some master-only nodes and block until they've joined the cluster + *

    + * Ensure that we've got 5 voting nodes in the cluster, this means even if the original + * master accepts its own failed state update before standing down, we can still + * establish a quorum without its (or our own) join. + */ + private static String ensureSufficientMasterEligibleNodes() { + final var votingConfigSizeListener = ClusterServiceUtils.addTemporaryStateListener( + internalCluster().getAnyMasterNodeInstance(ClusterService.class), + cs -> 5 <= cs.coordinationMetadata().getLastCommittedConfiguration().getNodeIds().size() + ); + + try { + final var newNodeNames = internalCluster().startMasterOnlyNodes(Math.max(1, 5 - internalCluster().numMasterNodes())); + safeAwait(votingConfigSizeListener); + return newNodeNames.get(0); + } finally { + votingConfigSizeListener.onResponse(null); + } + } + + private static final ActionType TEST_ACTION_TYPE = new ActionType<>("internal:test"); + + public static final class TestActionPlugin extends Plugin implements ActionPlugin { + @Override + public Collection> getActions() { + return List.of(new ActionHandler<>(TEST_ACTION_TYPE, TestTransportAction.class)); + } + } + + public static final class TestRequest extends MasterNodeRequest { + TestRequest() { + super(TEST_REQUEST_TIMEOUT); + } + + TestRequest(StreamInput in) throws IOException { + super(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + } + + public static final class TestTransportAction extends TransportMasterNodeAction { + @Inject + public TestTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + TEST_ACTION_TYPE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + TestRequest::new, + indexNameExpressionResolver, + in -> ActionResponse.Empty.INSTANCE, + threadPool.generic() + ); + } + + @Override + protected void masterOperation(Task task, TestRequest request, ClusterState state, ActionListener listener) { + listener.onResponse(ActionResponse.Empty.INSTANCE); + } + + @Override + protected ClusterBlockException checkBlock(TestRequest request, ClusterState state) { + return null; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 22094b7ec8bac..2004c6fda8ce5 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -207,6 +207,7 @@ static TransportVersion def(int id) { public static final TransportVersion K_FOR_KNN_QUERY_ADDED = def(8_698_00_0); public static final TransportVersion TEXT_SIMILARITY_RERANKER_RETRIEVER = def(8_699_00_0); public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED = def(8_700_00_0); + public static final TransportVersion VERSIONED_MASTER_NODE_REQUESTS = def(8_701_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index 1b3dca31689e2..269ebd80fb36a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.support.master; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -41,6 +42,13 @@ public abstract class MasterNodeRequest + * Note that in production this is only used for sending the request out, so there's no need to preserve other marker interfaces such + * as {@link org.elasticsearch.action.IndicesRequest} or {@link org.elasticsearch.action.IndicesRequest.Replaceable} on the wrapped request. + * The receiving node will deserialize a request without a wrapper, with the correct interfaces and the appropriate master term stored + * directly in {@link MasterNodeRequest#masterTerm()}. However in tests sometimes we want to intercept the request as it's being sent, for + * which it may be necessary to use the test utility {@code MasterNodeRequestHelper#unwrapTermOverride} to remove the wrapper and access the + * inner request. + */ +class TermOverridingMasterNodeRequest extends TransportRequest { + + private static final Logger logger = LogManager.getLogger(TermOverridingMasterNodeRequest.class); + + final MasterNodeRequest request; + final long newMasterTerm; + + TermOverridingMasterNodeRequest(MasterNodeRequest request, long newMasterTerm) { + assert request.masterTerm() <= newMasterTerm; + this.request = request; + this.newMasterTerm = newMasterTerm; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return unsupported(); + } + + @Override + public String getDescription() { + return request.getDescription(); + } + + @Override + public void remoteAddress(InetSocketAddress remoteAddress) { + unsupported(); + } + + @Override + public InetSocketAddress remoteAddress() { + return unsupported(); + } + + @Override + public void incRef() { + request.incRef(); + } + + @Override + public boolean tryIncRef() { + return request.tryIncRef(); + } + + @Override + public boolean decRef() { + return request.decRef(); + } + + @Override + public boolean hasReferences() { + return request.hasReferences(); + } + + @Override + public void setParentTask(String parentTaskNode, long parentTaskId) { + unsupported(); + } + + @Override + public void setParentTask(TaskId taskId) { + unsupported(); + } + + @Override + public TaskId getParentTask() { + return request.getParentTask(); + } + + @Override + public void setRequestId(long requestId) { + request.setRequestId(requestId); + } + + @Override + public long getRequestId() { + return request.getRequestId(); + } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + request.writeTo(new TermOverridingStreamOutput(out, newMasterTerm)); + } + + @Override + public String toString() { + return Strings.format("TermOverridingMasterNodeRequest[newMasterTerm=%d in %s]", newMasterTerm, request); + } + + private static T unsupported() { + final var exception = new UnsupportedOperationException("TermOverridingMasterNodeRequest is only for outbound requests"); + 
logger.error("TermOverridingMasterNodeRequest is only for outbound requests", exception); + assert false : exception; + throw exception; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TermOverridingStreamOutput.java b/server/src/main/java/org/elasticsearch/action/support/master/TermOverridingStreamOutput.java new file mode 100644 index 0000000000000..45cf52bfbe208 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/master/TermOverridingStreamOutput.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support.master; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * Wrapper around a {@link StreamOutput} for use when sending a {@link MasterNodeRequest} to another node, exposing the {@link + * MasterNodeRequest#masterTerm()} to send out over the wire. + */ +class TermOverridingStreamOutput extends StreamOutput { + + private final StreamOutput delegate; + final long masterTerm; + + TermOverridingStreamOutput(StreamOutput delegate, long masterTerm) { + this.delegate = delegate; + this.masterTerm = masterTerm; + } + + @Override + public void writeByte(byte b) throws IOException { + delegate.writeByte(b); + } + + @Override + public void writeBytes(byte[] b, int offset, int length) throws IOException { + delegate.writeBytes(b, offset, length); + } + + @Override + public void flush() throws IOException { + delegate.flush(); + } + + @Override + public void close() throws IOException { + delegate.close(); + } + + @Override + public TransportVersion getTransportVersion() { + return delegate.getTransportVersion(); + } + + @Override + public void setTransportVersion(TransportVersion version) { + assert false : version; + delegate.setTransportVersion(version); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index e88ebbdc07688..0cbbdb0792890 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -240,13 +240,20 @@ protected void doStart(ClusterState clusterState) { if (nodes.getMasterNode() == null) { logger.debug("no known master node, scheduling a retry"); retryOnNextState(currentStateVersion, null); + } else if (clusterState.term() < request.masterTerm()) { + logger.debug( + "request routed to master in term [{}] but local term is [{}], waiting for local term bump", + request.masterTerm(), + clusterState.term() + ); + retry(currentStateVersion, null, cs -> request.masterTerm() <= cs.term()); } else { DiscoveryNode masterNode = nodes.getMasterNode(); logger.trace("forwarding request [{}] to master [{}]", actionName, masterNode); transportService.sendRequest( masterNode, actionName, - request, + new TermOverridingMasterNodeRequest(request, clusterState.term()), new ActionListenerResponseHandler<>(listener, responseReader, executor) { @Override public void handleException(final 
TransportException exp) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 859ee68a7846d..6f2af8414187e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.MasterNodeRequestHelper; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.action.support.nodes.BaseNodesResponse; @@ -470,15 +471,16 @@ private boolean shouldHaveOrigin(String action, TransportRequest request) { */ return false; } - if (false == (request instanceof IndicesRequest)) { + + if (MasterNodeRequestHelper.unwrapTermOverride(request) instanceof IndicesRequest indicesRequest) { + /* + * When the API Tasks API makes an indices request it only every + * targets the .tasks index. Other requests come from the tests. + */ + return Arrays.equals(new String[] { ".tasks" }, indicesRequest.indices()); + } else { return false; } - IndicesRequest ir = (IndicesRequest) request; - /* - * When the API Tasks API makes an indices request it only every - * targets the .tasks index. Other requests come from the tests. - */ - return Arrays.equals(new String[] { ".tasks" }, ir.indices()); } } } diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index a4838f568e173..6568464705d9e 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -524,7 +524,7 @@ public void testDelegateToMaster() throws ExecutionException, InterruptedExcepti assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; assertTrue(capturedRequest.node().isMasterNode()); - assertThat(capturedRequest.request(), equalTo(request)); + assertThat(asInstanceOf(TermOverridingMasterNodeRequest.class, capturedRequest.request()).request, equalTo(request)); assertThat(capturedRequest.action(), equalTo("internal:testAction")); Response response = new Response(); @@ -552,7 +552,7 @@ public void testDelegateToFailingMaster() throws ExecutionException, Interrupted assertThat(capturedRequests.length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = capturedRequests[0]; assertTrue(capturedRequest.node().isMasterNode()); - assertThat(capturedRequest.request(), equalTo(request)); + assertThat(asInstanceOf(TermOverridingMasterNodeRequest.class, capturedRequest.request()).request, equalTo(request)); assertThat(capturedRequest.action(), equalTo("internal:testAction")); if (rejoinSameMaster) { @@ -586,7 +586,7 @@ public void testDelegateToFailingMaster() throws ExecutionException, Interrupted assertThat(capturedRequests.length, equalTo(1)); capturedRequest = capturedRequests[0]; assertTrue(capturedRequest.node().isMasterNode()); - assertThat(capturedRequest.request(), 
equalTo(request)); + assertThat(asInstanceOf(TermOverridingMasterNodeRequest.class, capturedRequest.request()).request, equalTo(request)); assertThat(capturedRequest.action(), equalTo("internal:testAction")); } else if (failsWithConnectTransportException) { transport.handleRemoteError(capturedRequest.requestId(), new ConnectTransportException(masterNode, "Fake error")); @@ -639,7 +639,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A assertThat(transport.capturedRequests().length, equalTo(1)); CapturingTransport.CapturedRequest capturedRequest = transport.capturedRequests()[0]; assertTrue(capturedRequest.node().isMasterNode()); - assertThat(capturedRequest.request(), equalTo(request)); + assertThat(asInstanceOf(TermOverridingMasterNodeRequest.class, capturedRequest.request()).request, equalTo(request)); assertThat(capturedRequest.action(), equalTo("internal:testAction")); transport.handleResponse(capturedRequest.requestId(), response); diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequestHelper.java b/test/framework/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequestHelper.java new file mode 100644 index 0000000000000..217b7addeb2de --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequestHelper.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support.master; + +import org.elasticsearch.transport.TransportRequest; + +public class MasterNodeRequestHelper { + public static TransportRequest unwrapTermOverride(TransportRequest transportRequest) { + return transportRequest instanceof TermOverridingMasterNodeRequest termOverridingMasterNodeRequest + ? 
termOverridingMasterNodeRequest.request + : transportRequest; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 51893e551ba88..da478cbf1cb26 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.MasterNodeRequestHelper; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; @@ -574,7 +575,8 @@ public void sendRequest( RequestHandlerRegistry reg = MockTransportService.this.getRequestHandler(action); clonedRequest = reg.newRequest(bStream.bytes().streamInput()); } - assert clonedRequest.getClass().equals(request.getClass()) : clonedRequest + " vs " + request; + assert clonedRequest.getClass().equals(MasterNodeRequestHelper.unwrapTermOverride(request).getClass()) + : clonedRequest + " vs " + request; final RunOnce runnable = new RunOnce(new AbstractRunnable() { @Override From 7ed3e049cc56287613560167c37cf755edd8f135 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 3 Jul 2024 05:24:30 +0100 Subject: [PATCH 138/216] AwaitsFix for #110406 --- .../metrics/HistogramPercentileAggregationTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java index 7c6f85104b5f8..f60466bcf43cc 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistogramPercentileAggregationTests.java @@ -241,6 +241,7 @@ public void testTDigestHistogram() throws Exception { ); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/110406") public void testBoxplotHistogram() throws Exception { int compression = TestUtil.nextInt(random(), 200, 300); setupTDigestHistogram(compression); From f8efd9dbec41603fe4b81693cf696968d86bccd9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 3 Jul 2024 06:26:33 +0100 Subject: [PATCH 139/216] Fix `testBatchCloseIndices` (#110405) It's not enough to wait for the tasks to appear in the pending tasks queue, we must also then wait for the submitting threads to become idle to ensure that the queue size is correct and therefore that the batch is properly formed. 
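A sketch of the idle-wait pattern this relies on (simplified from the
`flushThreadPoolExecutor` helper added below; `executor`, `maxThreads` and
`safeAwait` stand in for the test-framework utilities):

```java
// One barrier party per pool thread, plus one for the caller.
final var barrier = new CyclicBarrier(maxThreads + 1);
for (int i = 0; i < maxThreads; i++) {
    // A pool thread can only reach this task after completing all work queued ahead of it.
    executor.execute(() -> safeAwait(barrier));
}
// Trips only once every pool thread is parked at the barrier, i.e. all prior work is done.
safeAwait(barrier);
```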
Closes #109187 --- .../admin/cluster/tasks/ListTasksIT.java | 14 +-------- ...etadataIndexStateServiceBatchingTests.java | 11 ++++--- .../org/elasticsearch/test/ESTestCase.java | 29 +++++++++++++++++++ 3 files changed, 37 insertions(+), 17 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java index 4a076cb3b6e66..60462863dd09a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/ListTasksIT.java @@ -29,10 +29,8 @@ import java.util.Collection; import java.util.List; -import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; @@ -102,7 +100,7 @@ public void testWaitForCompletion() throws Exception { })); // briefly fill up the management pool so that (a) we know the wait has started and (b) we know it's not blocking - flushThreadPool(threadPool, ThreadPool.Names.MANAGEMENT); + flushThreadPoolExecutor(threadPool, ThreadPool.Names.MANAGEMENT); final var getWaitFuture = new PlainActionFuture(); clusterAdmin().prepareGetTask(task.taskId()).setWaitForCompletion(true).execute(getWaitFuture.delegateFailure((l, getResult) -> { @@ -127,16 +125,6 @@ public void testWaitForCompletion() throws Exception { getWaitFuture.get(10, TimeUnit.SECONDS); } - private void flushThreadPool(ThreadPool threadPool, String executor) throws InterruptedException, BrokenBarrierException, - TimeoutException { - var maxThreads = threadPool.info(executor).getMax(); - var barrier = new CyclicBarrier(maxThreads + 1); - for (int i = 0; i < maxThreads; i++) { - threadPool.executor(executor).execute(() -> safeAwait(barrier)); - } - barrier.await(10, TimeUnit.SECONDS); - } - @Override protected Collection> getPlugins() { return List.of(TestPlugin.class); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java index a093178c04814..adec7b94df6a1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexStateServiceBatchingTests.java @@ -19,13 +19,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.TimeUnit; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_BLOCKS_WRITE_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -126,6 +126,9 @@ public void testBatchCloseIndices() throws Exception { // wait for the queue to have the second close tasks (the close-indices tasks) assertBusy(() -> assertThat(findPendingTasks(masterService, "close-indices"), hasSize(2))); + // wait for all ongoing tasks to complete on GENERIC to ensure that the batch is fully-formed 
(see #109187) + flushThreadPoolExecutor(getInstanceFromNode(ThreadPool.class), ThreadPool.Names.GENERIC); + block2.run(); // release block // assert that the requests were acknowledged @@ -208,14 +211,14 @@ public void testBatchBlockIndices() throws Exception { private static CheckedRunnable blockMasterService(MasterService masterService) { final var executionBarrier = new CyclicBarrier(2); masterService.createTaskQueue("block", Priority.URGENT, batchExecutionContext -> { - executionBarrier.await(10, TimeUnit.SECONDS); // notify test thread that the master service is blocked - executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us + safeAwait(executionBarrier); // notify test thread that the master service is blocked + safeAwait(executionBarrier); // wait for test thread to release us for (final var taskContext : batchExecutionContext.taskContexts()) { taskContext.success(() -> {}); } return batchExecutionContext.initialState(); }).submitTask("block", new ExpectSuccessTask(), null); - return () -> executionBarrier.await(10, TimeUnit.SECONDS); + return () -> safeAwait(executionBarrier); } private static ClusterStateListener closedIndexCountListener(int closedIndices) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index ca6be72fd585b..4bb5fbd5e7031 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -75,6 +75,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -2290,6 +2291,34 @@ public static void safeSleep(long millis) { } } + /** + * Wait for all tasks currently running or enqueued on the given executor to complete. 
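+     * <p>
+     * Works by submitting one force-executed barrier task per pool thread and then awaiting the same barrier, so it returns
+     * only once every pool thread has drained the work queued ahead of it and reached the barrier.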
+ */ + public static void flushThreadPoolExecutor(ThreadPool threadPool, String executorName) { + final var maxThreads = threadPool.info(executorName).getMax(); + final var barrier = new CyclicBarrier(maxThreads + 1); + final var executor = threadPool.executor(executorName); + for (int i = 0; i < maxThreads; i++) { + executor.execute(new AbstractRunnable() { + @Override + protected void doRun() { + safeAwait(barrier); + } + + @Override + public void onFailure(Exception e) { + fail(e, "unexpected"); + } + + @Override + public boolean isForceExecution() { + return true; + } + }); + } + safeAwait(barrier); + } + protected static boolean isTurkishLocale() { return Locale.getDefault().getLanguage().equals(new Locale("tr").getLanguage()) || Locale.getDefault().getLanguage().equals(new Locale("az").getLanguage()); From a2c00c417a167b832e45b59f758da2a02cb111a4 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 3 Jul 2024 07:56:52 +0100 Subject: [PATCH 140/216] AwaitsFix: https://github.com/elastic/elasticsearch/issues/110408 --- muted-tests.yml | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 376ab0164c314..e58a553f6fa8d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -4,7 +4,8 @@ tests: method: "testGuessIsDayFirstFromLocale" - class: "org.elasticsearch.test.rest.ClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/108857" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ + \ dependent mappings / dates}" - class: "org.elasticsearch.upgrades.SearchStatesIT" issue: "https://github.com/elastic/elasticsearch/issues/108991" method: "testCanMatch" @@ -13,7 +14,8 @@ tests: method: "testTrainedModelInference" - class: "org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109188" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ + \ dependent mappings / dates}" - class: "org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT" issue: "https://github.com/elastic/elasticsearch/issues/109189" method: "test {p0=esql/70_locale/Date format with Italian locale}" @@ -28,7 +30,8 @@ tests: method: "testTimestampFieldTypeExposedByAllIndicesServices" - class: "org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109318" - method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling (too complex pattern)}" + method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling\ + \ (too complex pattern)}" - class: "org.elasticsearch.xpack.ml.integration.ClassificationHousePricingIT" issue: "https://github.com/elastic/elasticsearch/issues/101598" method: "testFeatureImportanceValues" @@ -80,7 +83,8 @@ tests: method: testLoadAll issue: https://github.com/elastic/elasticsearch/issues/110244 - class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT - method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not supported} + method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not + supported} issue: https://github.com/elastic/elasticsearch/issues/110290 - class: 
org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT method: test {yaml=painless/146_dense_vector_bit_basic/Dot Product is not supported} @@ -109,6 +113,9 @@ tests: - class: org.elasticsearch.search.aggregations.bucket.terms.RareTermsIT method: testSingleValuedString issue: https://github.com/elastic/elasticsearch/issues/110388 +- class: "org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests" + issue: "https://github.com/elastic/elasticsearch/issues/110408" + method: "testCreateAndRestorePartialSearchableSnapshot" # Examples: # From afbd568e8863e7fcc3023b8f5ef230c34048ce03 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 3 Jul 2024 09:19:20 +0200 Subject: [PATCH 141/216] ES|QL: add tests for NaN on BUCKET function (#110380) Closes #105166 Adding tests that verify that `BUCKET` (previously `AUTO_BUCKET`) function does not return `NaN` when an invalid number of buckets is provided (eg. 0, -1 or a very large integer) --- .../src/main/resources/bucket.csv-spec | 120 ++++++++++++++++++ 1 file changed, 120 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index f41bf3f020eb5..7e2afb9267e5b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -570,3 +570,123 @@ ROW long = TO_LONG(100), double = 99., int = 100 b1:double| b2:double| b3:double 99.0 |0.0 |99.0 ; + + +zeroBucketsRow#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +ROW a = 1 +| STATS max = max(a) BY b = BUCKET(a, 0, 0, 0) +; +warningRegex:evaluation of \[BUCKET\(a, 0, 0, 0\)\] failed, treating result as null. Only first 20 failures recorded +warningRegex:java.lang.ArithmeticException: / by zero + +max:integer | b:double +1 | null +; + + +zeroBuckets#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| STATS max = max(salary) BY b = BUCKET(salary, 0, 0, 0) +; +warningRegex:evaluation of \[BUCKET\(salary, 0, 0, 0\)\] failed, treating result as null. Only first 20 failures recorded +warningRegex:java.lang.ArithmeticException: / by zero + +max:integer | b:double +74999 | null +; + + +zeroBucketsDouble#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| STATS max = max(salary) BY b = BUCKET(salary, 0.) +; +warningRegex:evaluation of \[BUCKET\(salary, 0.\)\] failed, treating result as null. Only first 20 failures recorded +warningRegex:java.lang.ArithmeticException: / by zero + +max:integer | b:double +74999 | null +; + +minusOneBucketsRow#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +ROW a = 1 +| STATS max = max(a) BY b = BUCKET(a, -1, 0, 0) +; +warningRegex:evaluation of \[BUCKET\(a, -1, 0, 0\)\] failed, treating result as null. Only first 20 failures recorded +warningRegex:java.lang.ArithmeticException: / by zero + +max:integer | b:double +1 | null +; + + +minusOneBuckets#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| STATS max = max(salary) BY b = BUCKET(salary, -1, 0, 0) +; +warningRegex:evaluation of \[BUCKET\(salary, -1, 0, 0\)\] failed, treating result as null. 
Only first 20 failures recorded +warningRegex:java.lang.ArithmeticException: / by zero + +max:integer | b:double +74999 | null +; + + +tooManyBucketsRow#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +ROW a = 1 +| STATS max = max(a) BY b = BUCKET(a, 100000000000, 0, 0) +; +warningRegex:evaluation of \[BUCKET\(a, 100000000000, 0, 0\)\] failed, treating result as null. Only first 20 failures recorded +warningRegex:java.lang.ArithmeticException: / by zero + +max:integer | b:double +1 | null +; + + +tooManyBuckets#[skip:-8.13.99, reason:BUCKET renamed in 8.14] +FROM employees +| STATS max = max(salary) BY b = BUCKET(salary, 100000000000, 0, 0) +; +warningRegex:evaluation of \[BUCKET\(salary, 100000000000, 0, 0\)\] failed, treating result as null. Only first 20 failures recorded +warningRegex:java.lang.ArithmeticException: / by zero + +max:integer | b:double +74999 | null +; + + +foldableBuckets +required_capability: casting_operator +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| EVAL c = concat("2", "0")::int +| STATS hires_per_month = COUNT(*) BY month = BUCKET(hire_date, c, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| SORT month +; + + hires_per_month:long | month:date +2 |1985-02-01T00:00:00.000Z +1 |1985-05-01T00:00:00.000Z +1 |1985-07-01T00:00:00.000Z +1 |1985-09-01T00:00:00.000Z +2 |1985-10-01T00:00:00.000Z +4 |1985-11-01T00:00:00.000Z +; + + +foldableBucketsInline +required_capability: casting_operator +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| STATS hires_per_month = COUNT(*) BY month = BUCKET(hire_date, concat("2", "0")::int, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| SORT month +; + + hires_per_month:long | month:date +2 |1985-02-01T00:00:00.000Z +1 |1985-05-01T00:00:00.000Z +1 |1985-07-01T00:00:00.000Z +1 |1985-09-01T00:00:00.000Z +2 |1985-10-01T00:00:00.000Z +4 |1985-11-01T00:00:00.000Z +; From aa611adbdf85f5367222bb2cc6b6da62336bdc02 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 3 Jul 2024 17:35:33 +1000 Subject: [PATCH 142/216] Minor javadoc fix for BalancedShardsAllocator (#110117) Relates: #109662 --- .../routing/allocation/allocator/BalancedShardsAllocator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 193a1558c857a..411143b1aef9d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -68,7 +68,7 @@ *
 * <ul>
 *     <li>even shard count across nodes (weighted by cluster.routing.allocation.balance.shard)</li>
 *     <li>spread shards of the same index across different nodes (weighted by cluster.routing.allocation.balance.index)</li>
 *     <li>even write load of the data streams write indices across nodes (weighted by cluster.routing.allocation.balance.write_load)</li>
- *     <li>even disk usage across nodes (weighted by cluster.routing.allocation.balance.write_load)</li>
+ *     <li>even disk usage across nodes (weighted by cluster.routing.allocation.balance.disk_usage)</li>
 * </ul>
  • * * The sensitivity of the algorithm is defined by cluster.routing.allocation.balance.threshold. * Allocator takes into account constraints set by {@code AllocationDeciders} when allocating and balancing shards. From 30b32b6a465118192c0a93a3a1c52ec4b10a12bd Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 3 Jul 2024 10:18:40 +0200 Subject: [PATCH 143/216] semantic_text: Updated copy-to docs (#110350) --- .../mapping/types/semantic-text.asciidoc | 44 ++++++++++++++++--- 1 file changed, 39 insertions(+), 5 deletions(-) diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc index bbb501c4ccc36..6ee30e6b9f831 100644 --- a/docs/reference/mapping/types/semantic-text.asciidoc +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -8,7 +8,7 @@ beta[] The `semantic_text` field type automatically generates embeddings for text -content using an inference endpoint. +content using an inference endpoint. The `semantic_text` field type specifies an inference endpoint identifier that will be used to generate embeddings. You can create the inference endpoint by using the <>. @@ -24,7 +24,7 @@ PUT my-index-000001 { "mappings": { "properties": { - "inference_field": { + "inference_field": { "type": "semantic_text", "inference_id": "my-elser-endpoint" } @@ -40,7 +40,7 @@ PUT my-index-000001 ==== Parameters for `semantic_text` fields `inference_id`:: -(Required, string) +(Required, string) Inference endpoint that will be used to generate the embeddings for the field. Use the <> to create the endpoint. @@ -137,8 +137,42 @@ field to collect the values of other fields for semantic search. Each value has its embeddings calculated separately; each field value is a separate set of chunk(s) in the resulting embeddings. -This imposes a restriction on bulk updates to documents with `semantic_text`. -In bulk requests, all fields that are copied to a `semantic_text` field must have a value to ensure every embedding is calculated correctly. +This imposes a restriction on bulk requests and ingestion pipelines that update documents with `semantic_text` fields. +In these cases, all fields that are copied to a `semantic_text` field, including the `semantic_text` field value, must have a value to ensure every embedding is calculated correctly. + +For example, the following mapping: + +[source,console] +------------------------------------------------------------ +PUT test-index +{ + "mappings": { + "properties": { + "infer_field": { + "type": "semantic_text", + "inference_id": "my-elser-endpoint" + }, + "source_field": { + "type": "text", + "copy_to": "infer_field" + } + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + +Will need the following bulk update request to ensure that `infer_field` is updated correctly: + +[source,console] +------------------------------------------------------------ +PUT test-index/_bulk +{"update": {"_id": "1"}} +{"doc": {"infer_field": "updated inference field", "source_field": "updated source field"}} +------------------------------------------------------------ +// TEST[skip:TBD] + +Notice that both the `semantic_text` field and the source field are updated in the bulk request. 
[discrete] [[limitations]] From e78bdc953a8e5ab63163c5e115a19dfc45713b44 Mon Sep 17 00:00:00 2001 From: Sylvain Wallez Date: Wed, 3 Jul 2024 10:29:57 +0200 Subject: [PATCH 144/216] ESQL: add Arrow dataframes output format (#109873) Initial support for Apache Arrow's streaming format as a response for ES|QL. It triggers based on the Accept header or the format request parameter. Arrow has implementations in every mainstream language and is a backend of the Python Pandas library, which is extremely popular among data scientists and data analysts. Arrow's streaming format has also become the de facto standard for dataframe interchange. It is an efficient binary format that allows zero-cost deserialization by adding data access wrappers on top of memory buffers received from the network. This PR builds on the experiment made by @nik9000 in PR #104877 Features/limitations: - all ES|QL data types are supported - multi-valued fields are not supported - fields of type _source are output as JSON text in a varchar array. In a future iteration we may want to offer the choice of the more efficient CBOR and SMILE formats. Technical details: Arrow comes with its own memory management to handle vectors with direct memory, reference counting, etc. We don't want to use this as it conflicts with Elasticsearch's own memory management. We therefore use the Arrow library only for the metadata objects describing the dataframe schema and the structure of the streaming format. The Arrow vector data is produced directly from ES|QL blocks. --------- Co-authored-by: Nik Everett --- docs/changelog/109873.yaml | 5 + docs/reference/esql/esql-rest.asciidoc | 3 + gradle/verification-metadata.xml | 30 + .../io/stream/RecyclerBytesStreamOutput.java | 24 + .../common/io/stream/StreamOutput.java | 22 + x-pack/plugin/esql/arrow/build.gradle | 61 + .../esql/arrow/licenses/arrow-LICENSE.txt | 2261 +++++++++++++++++ .../esql/arrow/licenses/arrow-NOTICE.txt | 84 + .../arrow/licenses/checker-qual-LICENSE.txt | 22 + .../arrow/licenses/checker-qual-NOTICE.txt | 0 .../licenses/flatbuffers-java-LICENSE.txt | 202 ++ .../licenses/flatbuffers-java-NOTICE.txt | 0 .../esql/arrow/licenses/jackson-LICENSE.txt | 202 ++ .../esql/arrow/licenses/jackson-NOTICE.txt | 0 .../esql/arrow/licenses/slf4j-LICENSE.txt | 21 + .../esql/arrow/licenses/slf4j-NOTICE.txt | 0 .../esql/arrow/AllocationManagerShim.java | 69 + .../xpack/esql/arrow/ArrowFormat.java | 35 + .../xpack/esql/arrow/ArrowResponse.java | 379 +++ .../xpack/esql/arrow/BlockConverter.java | 452 ++++ .../xpack/esql/arrow/ValueConversions.java | 80 + .../xpack/esql/arrow/ArrowResponseTests.java | 600 +++++ .../esql/arrow/ValueConversionsTests.java | 84 + .../src/test/resources/plugin-security.policy | 13 + x-pack/plugin/esql/build.gradle | 2 + .../esql/qa/server/single-node/build.gradle | 14 + .../esql/qa/single_node/ArrowFormatIT.java | 242 ++ .../esql/action/EsqlResponseListener.java | 11 + .../esql/plugin/EsqlMediaTypeParser.java | 3 +- .../plugin-metadata/plugin-security.policy | 12 + 30 files changed, 4932 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/109873.yaml create mode 100644 x-pack/plugin/esql/arrow/build.gradle create mode 100644 x-pack/plugin/esql/arrow/licenses/arrow-LICENSE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/arrow-NOTICE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/checker-qual-LICENSE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/checker-qual-NOTICE.txt create mode 100644 
x-pack/plugin/esql/arrow/licenses/flatbuffers-java-LICENSE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/flatbuffers-java-NOTICE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/jackson-LICENSE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/jackson-NOTICE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/slf4j-LICENSE.txt create mode 100644 x-pack/plugin/esql/arrow/licenses/slf4j-NOTICE.txt create mode 100644 x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/AllocationManagerShim.java create mode 100644 x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowFormat.java create mode 100644 x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java create mode 100644 x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java create mode 100644 x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ValueConversions.java create mode 100644 x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java create mode 100644 x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ValueConversionsTests.java create mode 100644 x-pack/plugin/esql/arrow/src/test/resources/plugin-security.policy create mode 100644 x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/ArrowFormatIT.java diff --git a/docs/changelog/109873.yaml b/docs/changelog/109873.yaml new file mode 100644 index 0000000000000..c77197cc22d0a --- /dev/null +++ b/docs/changelog/109873.yaml @@ -0,0 +1,5 @@ +pr: 109873 +summary: "ESQL: add Arrow dataframes output format" +area: ES|QL +type: feature +issues: [] diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index de2b6dedd8776..5b90e96d7a734 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -111,6 +111,9 @@ s|Description |{wikipedia}/Smile_(data_interchange_format)[Smile] binary data format similar to CBOR +|arrow +|application/vnd.apache.arrow.stream +|**Experimental.** https://arrow.apache.org/[Apache Arrow] dataframes, https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format[IPC streaming format] |=== The `csv` format accepts a formatting URL query attribute, `delimiter`, which diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d8df128668b45..cd408ba75aa10 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -581,6 +581,11 @@ + + + + + @@ -1841,6 +1846,26 @@ + + + + + + + + + + + + + + + + + + + + @@ -3177,6 +3202,11 @@ + + + + + diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/RecyclerBytesStreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/RecyclerBytesStreamOutput.java index 7be964fc1be39..c4857a8b85ea3 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/RecyclerBytesStreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/RecyclerBytesStreamOutput.java @@ -32,7 +32,9 @@ public class RecyclerBytesStreamOutput extends BytesStream implements Releasable { static final VarHandle VH_BE_INT = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.BIG_ENDIAN); + static final VarHandle VH_LE_INT = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.LITTLE_ENDIAN); static final VarHandle VH_BE_LONG = 
MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.BIG_ENDIAN); + static final VarHandle VH_LE_LONG = MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.LITTLE_ENDIAN); private final ArrayList> pages = new ArrayList<>(); private final Recycler recycler; @@ -106,6 +108,17 @@ public void writeInt(int i) throws IOException { } } + @Override + public void writeIntLE(int i) throws IOException { + if (4 > (pageSize - currentPageOffset)) { + super.writeIntLE(i); + } else { + BytesRef currentPage = pages.get(pageIndex).v(); + VH_LE_INT.set(currentPage.bytes, currentPage.offset + currentPageOffset, i); + currentPageOffset += 4; + } + } + @Override public void writeLong(long i) throws IOException { if (8 > (pageSize - currentPageOffset)) { @@ -117,6 +130,17 @@ public void writeLong(long i) throws IOException { } } + @Override + public void writeLongLE(long i) throws IOException { + if (8 > (pageSize - currentPageOffset)) { + super.writeLongLE(i); + } else { + BytesRef currentPage = pages.get(pageIndex).v(); + VH_LE_LONG.set(currentPage.bytes, currentPage.offset + currentPageOffset, i); + currentPageOffset += 8; + } + } + @Override public void writeWithSizePrefix(Writeable writeable) throws IOException { // TODO: do this without copying the bytes from tmp by calling writeBytes and just use the pages in tmp directly through diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 833e7f27852c8..c245498333c94 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -190,6 +190,15 @@ public void writeInt(int i) throws IOException { writeBytes(buffer, 0, 4); } + /** + * Writes an int as four bytes, least significant bytes first. + */ + public void writeIntLE(int i) throws IOException { + final byte[] buffer = scratch.get(); + ByteUtils.writeIntLE(i, buffer, 0); + writeBytes(buffer, 0, 4); + } + /** * Writes an int in a variable-length format. Writes between one and * five bytes. Smaller values take fewer bytes. Negative numbers @@ -243,6 +252,15 @@ public void writeLong(long i) throws IOException { writeBytes(buffer, 0, 8); } + /** + * Writes a long as eight bytes. + */ + public void writeLongLE(long i) throws IOException { + final byte[] buffer = scratch.get(); + ByteUtils.writeLongLE(i, buffer, 0); + writeBytes(buffer, 0, 8); + } + /** * Writes a non-negative long in a variable-length format. Writes between one and ten bytes. Smaller values take fewer bytes. Negative * numbers use ten bytes and trip assertions (if running in tests) so prefer {@link #writeLong(long)} or {@link #writeZLong(long)} for @@ -442,6 +460,10 @@ public void writeDouble(double v) throws IOException { writeLong(Double.doubleToLongBits(v)); } + public void writeDoubleLE(double v) throws IOException { + writeLongLE(Double.doubleToLongBits(v)); + } + public void writeOptionalDouble(@Nullable Double v) throws IOException { if (v == null) { writeBoolean(false); diff --git a/x-pack/plugin/esql/arrow/build.gradle b/x-pack/plugin/esql/arrow/build.gradle new file mode 100644 index 0000000000000..e8ae4b049cf7d --- /dev/null +++ b/x-pack/plugin/esql/arrow/build.gradle @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +apply plugin: 'elasticsearch.build' + +dependencies { + compileOnly project(':server') + compileOnly project(':x-pack:plugin:esql:compute') + compileOnly project(':x-pack:plugin:esql-core') + compileOnly project(':x-pack:plugin:mapper-version') + implementation('org.apache.arrow:arrow-vector:16.1.0') + implementation('org.apache.arrow:arrow-format:16.1.0') + implementation('org.apache.arrow:arrow-memory-core:16.1.0') + implementation('org.checkerframework:checker-qual:3.42.0') + implementation('com.google.flatbuffers:flatbuffers-java:23.5.26') + // Needed for the json arrow serialization, and loaded even if we don't use it. + implementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") + implementation("com.fasterxml.jackson.core:jackson-core:${versions.jackson}") + implementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") + implementation("org.slf4j:slf4j-api:${versions.slf4j}") + runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}" + + testImplementation project(':test:framework') + testImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0') +} + +tasks.named("dependencyLicenses").configure { + mapping from: /jackson-.*/, to: 'jackson' + mapping from: /arrow-.*/, to: 'arrow' + mapping from: /slf4j-.*/, to: 'slf4j' +} + +tasks.named("thirdPartyAudit").configure { + ignoreViolations( + // uses sun.misc.Unsafe. Only used in tests. + 'org.apache.arrow.memory.util.hash.SimpleHasher', + 'org.apache.arrow.memory.util.hash.MurmurHasher', + 'org.apache.arrow.memory.util.MemoryUtil', + 'org.apache.arrow.memory.util.MemoryUtil$1', + 'org.apache.arrow.vector.DecimalVector', + 'org.apache.arrow.vector.BaseFixedWidthVector', + 'org.apache.arrow.vector.util.DecimalUtility', + 'org.apache.arrow.vector.Decimal256Vector', + 'org.apache.arrow.vector.util.VectorAppender', + 'org.apache.arrow.memory.ArrowBuf', + 'org.apache.arrow.vector.BitVectorHelper', + 'org.apache.arrow.memory.util.ByteFunctionHelpers', + ) + ignoreMissingClasses( + 'org.apache.commons.codec.binary.Hex' + ) +} + +test { + jvmArgs('--add-opens=java.base/java.nio=ALL-UNNAMED') +} diff --git a/x-pack/plugin/esql/arrow/licenses/arrow-LICENSE.txt b/x-pack/plugin/esql/arrow/licenses/arrow-LICENSE.txt new file mode 100644 index 0000000000000..7bb1330a1002b --- /dev/null +++ b/x-pack/plugin/esql/arrow/licenses/arrow-LICENSE.txt @@ -0,0 +1,2261 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------------------------------------------------------------------------------- + +src/arrow/util (some portions): Apache 2.0, and 3-clause BSD + +Some portions of this module are derived from code in the Chromium project, +copyright (c) Google inc and (c) The Chromium Authors and licensed under the +Apache 2.0 License or the under the 3-clause BSD license: + + Copyright (c) 2013 The Chromium Authors. 
All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +This project includes code from Daniel Lemire's FrameOfReference project. + +https://github.com/lemire/FrameOfReference/blob/6ccaf9e97160f9a3b299e23a8ef739e711ef0c71/src/bpacking.cpp +https://github.com/lemire/FrameOfReference/blob/146948b6058a976bc7767262ad3a2ce201486b93/scripts/turbopacking64.py + +Copyright: 2013 Daniel Lemire +Home page: http://lemire.me/en/ +Project page: https://github.com/lemire/FrameOfReference +License: Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This project includes code from the TensorFlow project + +Copyright 2015 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +This project includes code from the NumPy project. + +https://github.com/numpy/numpy/blob/e1f191c46f2eebd6cb892a4bfe14d9dd43a06c4e/numpy/core/src/multiarray/multiarraymodule.c#L2910 + +https://github.com/numpy/numpy/blob/68fd82271b9ea5a9e50d4e761061dfcca851382a/numpy/core/src/multiarray/datetime.c + +Copyright (c) 2005-2017, NumPy Developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the NumPy Developers nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +This project includes code from the Boost project + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +This project includes code from the FlatBuffers project + +Copyright 2014 Google Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+
+--------------------------------------------------------------------------------
+
+This project includes code from the tslib project
+
+Copyright 2015 Microsoft Corporation. All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+--------------------------------------------------------------------------------
+
+This project includes code from the jemalloc project
+
+https://github.com/jemalloc/jemalloc
+
+Copyright (C) 2002-2017 Jason Evans.
+All rights reserved.
+Copyright (C) 2007-2012 Mozilla Foundation. All rights reserved.
+Copyright (C) 2009-2017 Facebook, Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice(s),
+   this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice(s),
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY EXPRESS
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+--------------------------------------------------------------------------------
+
+This project includes code from the Go project, BSD 3-clause license + PATENTS
+weak patent termination clause
+(https://github.com/golang/go/blob/master/PATENTS).
+
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +This project includes code from the hs2client + +https://github.com/cloudera/hs2client + +Copyright 2016 Cloudera Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +The script ci/scripts/util_wait_for_it.sh has the following license + +Copyright (c) 2016 Giles Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +The script r/configure has the following license (MIT) + +Copyright (c) 2017, Jeroen Ooms and Jim Hester + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+--------------------------------------------------------------------------------
+
+cpp/src/arrow/util/logging.cc, cpp/src/arrow/util/logging.h and
+cpp/src/arrow/util/logging-test.cc are adapted from
+Ray Project (https://github.com/ray-project/ray) (Apache 2.0).
+
+Copyright (c) 2016 Ray Project (https://github.com/ray-project/ray)
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+--------------------------------------------------------------------------------
+The files cpp/src/arrow/vendored/datetime/date.h, cpp/src/arrow/vendored/datetime/tz.h,
+cpp/src/arrow/vendored/datetime/tz_private.h, cpp/src/arrow/vendored/datetime/ios.h,
+cpp/src/arrow/vendored/datetime/ios.mm,
+cpp/src/arrow/vendored/datetime/tz.cpp are adapted from
+Howard Hinnant's date library (https://github.com/HowardHinnant/date).
+It is licensed under the MIT license.
+
+The MIT License (MIT)
+Copyright (c) 2015, 2016, 2017 Howard Hinnant
+Copyright (c) 2016 Adrian Colomitchi
+Copyright (c) 2017 Florian Dang
+Copyright (c) 2017 Paul Thompson
+Copyright (c) 2018 Tomasz Kamiński
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE. 
+ +-------------------------------------------------------------------------------- + +The file cpp/src/arrow/util/utf8.h includes code adapted from the page + https://bjoern.hoehrmann.de/utf-8/decoder/dfa/ +with the following license (MIT) + +Copyright (c) 2008-2009 Bjoern Hoehrmann + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/xxhash/ have the following license +(BSD 2-Clause License) + +xxHash Library +Copyright (c) 2012-2014, Yann Collet +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You can contact the author at : +- xxHash homepage: http://www.xxhash.com +- xxHash source repository : https://github.com/Cyan4973/xxHash + +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/double-conversion/ have the following license +(BSD 3-Clause License) + +Copyright 2006-2011, the V8 project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Google Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+--------------------------------------------------------------------------------
+
+The files in cpp/src/arrow/vendored/uriparser/ have the following license
+(BSD 3-Clause License)
+
+uriparser - RFC 3986 URI parsing library
+
+Copyright (C) 2007, Weijia Song
+Copyright (C) 2007, Sebastian Pipping
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+    * Redistributions of source code must retain the above
+      copyright notice, this list of conditions and the following
+      disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials
+      provided with the distribution.
+
+    * Neither the name of the <ORGANIZATION> nor the names of its
+      contributors may be used to endorse or promote products
+      derived from this software without specific prior written
+      permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+OF THE POSSIBILITY OF SUCH DAMAGE.
+
+--------------------------------------------------------------------------------
+
+The files under dev/tasks/conda-recipes have the following license
+
+BSD 3-clause license
+Copyright (c) 2015-2018, conda-forge
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/utfcpp/ have the following license + +Copyright 2006-2018 Nemanja Trifunovic + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +This project includes code from Apache Kudu. + + * cpp/cmake_modules/CompilerInfo.cmake is based on Kudu's cmake_modules/CompilerInfo.cmake + +Copyright: 2016 The Apache Software Foundation. +Home page: https://kudu.apache.org/ +License: http://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This project includes code from Apache Impala (incubating), formerly +Impala. The Impala code and rights were donated to the ASF as part of the +Incubator process after the initial code imports into Apache Parquet. + +Copyright: 2012 Cloudera, Inc. +Copyright: 2016 The Apache Software Foundation. 
+Home page: http://impala.apache.org/
+License: http://www.apache.org/licenses/LICENSE-2.0
+
+--------------------------------------------------------------------------------
+
+This project includes code from Apache Aurora.
+
+* dev/release/{release,changelog,release-candidate} are based on the scripts from
+  Apache Aurora
+
+Copyright: 2016 The Apache Software Foundation.
+Home page: https://aurora.apache.org/
+License: http://www.apache.org/licenses/LICENSE-2.0
+
+--------------------------------------------------------------------------------
+
+This project includes code from the Google styleguide.
+
+* cpp/build-support/cpplint.py is based on the scripts from the Google styleguide.
+
+Copyright: 2009 Google Inc. All rights reserved.
+Homepage: https://github.com/google/styleguide
+License: 3-clause BSD
+
+--------------------------------------------------------------------------------
+
+This project includes code from Snappy.
+
+* cpp/cmake_modules/{SnappyCMakeLists.txt,SnappyConfig.h} are based on code
+  from Google's Snappy project.
+
+Copyright: 2009 Google Inc. All rights reserved.
+Homepage: https://github.com/google/snappy
+License: 3-clause BSD
+
+--------------------------------------------------------------------------------
+
+This project includes code from the manylinux project.
+
+* python/manylinux1/scripts/{build_python.sh,python-tag-abi-tag.py,
+  requirements.txt} are based on code from the manylinux project.
+
+Copyright: 2016 manylinux
+Homepage: https://github.com/pypa/manylinux
+License: The MIT License (MIT)
+
+--------------------------------------------------------------------------------
+
+This project includes code from the cymove project:
+
+* python/pyarrow/includes/common.pxd includes code from the cymove project
+
+The MIT License (MIT)
+Copyright (c) 2019 Omer Ozarslan
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+OR OTHER DEALINGS IN THE SOFTWARE.
+
+--------------------------------------------------------------------------------
+
+This project includes code from the Ursabot project under the dev/archery
+directory.
+
+License: BSD 2-Clause
+
+Copyright 2019 RStudio, Inc.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. 
Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+--------------------------------------------------------------------------------
+
+This project includes code from mingw-w64.
+
+* cpp/src/arrow/util/cpu-info.cc has a polyfill for mingw-w64 < 5
+
+Copyright (c) 2009 - 2013 by the mingw-w64 project
+Homepage: https://mingw-w64.org
+License: Zope Public License (ZPL) Version 2.1.
+
+--------------------------------------------------------------------------------
+
+This project includes code from Google's Asylo project.
+
+* cpp/src/arrow/result.h is based on status_or.h
+
+Copyright (c) 2017 Asylo authors
+Homepage: https://asylo.dev/
+License: Apache 2.0
+
+--------------------------------------------------------------------------------
+
+This project includes code from Google's protobuf project
+
+* cpp/src/arrow/result.h ARROW_ASSIGN_OR_RAISE is based off ASSIGN_OR_RETURN
+* cpp/src/arrow/util/bit_stream_utils.h contains code from wire_format_lite.h
+
+Copyright 2008 Google Inc. All rights reserved.
+Homepage: https://developers.google.com/protocol-buffers/
+License:
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. + +-------------------------------------------------------------------------------- + +3rdparty dependency LLVM is statically linked in certain binary distributions. +Additionally some sections of source code have been derived from sources in LLVM +and have been clearly labeled as such. LLVM has the following license: + +============================================================================== +The LLVM Project is under the Apache License v2.0 with LLVM Exceptions: +============================================================================== + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +---- LLVM Exceptions to the Apache 2.0 License ---- + +As an exception, if, as a result of your compiling your source code, portions +of this Software are embedded into an Object form of such source code, you +may redistribute such embedded portions in such Object form without complying +with the conditions of Sections 4(a), 4(b) and 4(d) of the License. + +In addition, if you combine or link compiled forms of this Software with +software that is licensed under the GPLv2 ("Combined Software") and if a +court of competent jurisdiction determines that the patent provision (Section +3), the indemnity provision (Section 9) or other Section of the License +conflicts with the conditions of the GPLv2, you may retroactively and +prospectively choose to deem waived or otherwise exclude such Section(s) of +the License, but only in their entirety and only with respect to the Combined +Software. + +============================================================================== +Software from third parties included in the LLVM Project: +============================================================================== +The LLVM Project contains third party software which is under different license +terms. All such code will be identified clearly using at least one of two +mechanisms: +1) It will be in a separate directory tree with its own `LICENSE.txt` or + `LICENSE` file at the top containing the specific license and restrictions + which apply to that software, or +2) It will contain specific license and restriction terms at the top of every + file. + +-------------------------------------------------------------------------------- + +3rdparty dependency gRPC is statically linked in certain binary +distributions, like the python wheels. gRPC has the following license: + +Copyright 2014 gRPC authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +-------------------------------------------------------------------------------- + +3rdparty dependency Apache Thrift is statically linked in certain binary +distributions, like the python wheels. Apache Thrift has the following license: + +Apache Thrift +Copyright (C) 2006 - 2019, The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +3rdparty dependency Apache ORC is statically linked in certain binary +distributions, like the python wheels. Apache ORC has the following license: + +Apache ORC +Copyright 2013-2019 The Apache Software Foundation + +This product includes software developed by The Apache Software +Foundation (http://www.apache.org/). + +This product includes software developed by Hewlett-Packard: +(c) Copyright [2014-2015] Hewlett-Packard Development Company, L.P + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +3rdparty dependency zstd is statically linked in certain binary +distributions, like the python wheels. ZSTD has the following license: + +BSD License + +For Zstandard software + +Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency lz4 is statically linked in certain binary +distributions, like the python wheels. lz4 has the following license: + +LZ4 Library +Copyright (c) 2011-2016, Yann Collet +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency Brotli is statically linked in certain binary +distributions, like the python wheels. Brotli has the following license: + +Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ +-------------------------------------------------------------------------------- + +3rdparty dependency rapidjson is statically linked in certain binary +distributions, like the python wheels. rapidjson and its dependencies have the +following licenses: + +Tencent is pleased to support the open source community by making RapidJSON +available. + +Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. +All rights reserved. + +If you have downloaded a copy of the RapidJSON binary from Tencent, please note +that the RapidJSON binary is licensed under the MIT License. +If you have downloaded a copy of the RapidJSON source code from Tencent, please +note that RapidJSON source code is licensed under the MIT License, except for +the third-party components listed below which are subject to different license +terms. Your integration of RapidJSON into your own projects may require +compliance with the MIT License, as well as the other licenses applicable to +the third-party components included within RapidJSON. To avoid the problematic +JSON license in your own projects, it's sufficient to exclude the +bin/jsonchecker/ directory, as it's the only code under the JSON license. +A copy of the MIT License is included in this file. + +Other dependencies and licenses: + + Open Source Software Licensed Under the BSD License: + -------------------------------------------------------------------- + + The msinttypes r29 + Copyright (c) 2006-2013 Alexander Chemeris + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH + DAMAGE. 
+ + Terms of the MIT License: + -------------------------------------------------------------------- + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +3rdparty dependency snappy is statically linked in certain binary +distributions, like the python wheels. snappy has the following license: + +Copyright 2011, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Google Inc. nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +=== + +Some of the benchmark data in testdata/ is licensed differently: + + - fireworks.jpeg is Copyright 2013 Steinar H. Gunderson, and + is licensed under the Creative Commons Attribution 3.0 license + (CC-BY-3.0). See https://creativecommons.org/licenses/by/3.0/ + for more information. + + - kppkn.gtb is taken from the Gaviota chess tablebase set, and + is licensed under the MIT License. See + https://sites.google.com/site/gaviotachessengine/Home/endgame-tablebases-1 + for more information. 
+
+ - paper-100k.pdf is an excerpt (bytes 92160 to 194560) from the paper
+   “Combinatorial Modeling of Chromatin Features Quantitatively Predicts DNA
+   Replication Timing in _Drosophila_” by Federico Comoglio and Renato Paro,
+   which is licensed under the CC-BY license. See
+   http://www.ploscompbiol.org/static/license for more information.
+
+ - alice29.txt, asyoulik.txt, plrabn12.txt and lcet10.txt are from Project
+   Gutenberg. The first three have expired copyrights and are in the public
+   domain; the latter does not have expired copyright, but is still in the
+   public domain according to the license information
+   (http://www.gutenberg.org/ebooks/53).
+
+--------------------------------------------------------------------------------
+
+3rdparty dependency gflags is statically linked in certain binary
+distributions, like the python wheels. gflags has the following license:
+
+Copyright (c) 2006, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+--------------------------------------------------------------------------------
+
+3rdparty dependency glog is statically linked in certain binary
+distributions, like the python wheels. glog has the following license:
+
+Copyright (c) 2008, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +A function gettimeofday in utilities.cc is based on + +http://www.google.com/codesearch/p?hl=en#dR3YEbitojA/COPYING&q=GetSystemTimeAsFileTime%20license:bsd + +The license of this code is: + +Copyright (c) 2003-2008, Jouni Malinen and contributors +All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name(s) of the above-listed copyright holder(s) nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency re2 is statically linked in certain binary +distributions, like the python wheels. re2 has the following license: + +Copyright (c) 2009 The RE2 Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency c-ares is statically linked in certain binary +distributions, like the python wheels. c-ares has the following license: + +# c-ares license + +Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS +file. + +Copyright 1998 by the Massachusetts Institute of Technology. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, provided that +the above copyright notice appear in all copies and that both that copyright +notice and this permission notice appear in supporting documentation, and that +the name of M.I.T. not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior permission. +M.I.T. makes no representations about the suitability of this software for any +purpose. It is provided "as is" without express or implied warranty. + +-------------------------------------------------------------------------------- + +3rdparty dependency zlib is redistributed as a dynamically linked shared +library in certain binary distributions, like the python wheels. In the future +this will likely change to static linkage. zlib has the following license: + +zlib.h -- interface of the 'zlib' general purpose compression library + version 1.2.11, January 15th, 2017 + + Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + +-------------------------------------------------------------------------------- + +3rdparty dependency openssl is redistributed as a dynamically linked shared +library in certain binary distributions, like the python wheels. openssl +preceding version 3 has the following license: + + LICENSE ISSUES + ============== + + The OpenSSL toolkit stays under a double license, i.e. 
both the conditions of + the OpenSSL License and the original SSLeay license apply to the toolkit. + See below for the actual license texts. + + OpenSSL License + --------------- + +/* ==================================================================== + * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * openssl-core@openssl.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.openssl.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * ==================================================================== + * + * This product includes cryptographic software written by Eric Young + * (eay@cryptsoft.com). This product includes software written by Tim + * Hudson (tjh@cryptsoft.com). + * + */ + + Original SSLeay License + ----------------------- + +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). 
+ * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + +-------------------------------------------------------------------------------- + +This project includes code from the rtools-backports project. + +* ci/scripts/PKGBUILD and ci/scripts/r_windows_build.sh are based on code + from the rtools-backports project. + +Copyright: Copyright (c) 2013 - 2019, Алексей and Jeroen Ooms. +All rights reserved. +Homepage: https://github.com/r-windows/rtools-backports +License: 3-clause BSD + +-------------------------------------------------------------------------------- + +Some code from pandas has been adapted for the pyarrow codebase. pandas is +available under the 3-clause BSD license, which follows: + +pandas license +============== + +Copyright (c) 2011-2012, Lambda Foundry, Inc. and PyData Development Team +All rights reserved. + +Copyright (c) 2008-2011 AQR Capital Management, LLC +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the copyright holder nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +Some bits from DyND, in particular aspects of the build system, have been +adapted from libdynd and dynd-python under the terms of the BSD 2-clause +license + +The BSD 2-Clause License + + Copyright (C) 2011-12, Dynamic NDArray Developers + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Dynamic NDArray Developers list: + + * Mark Wiebe + * Continuum Analytics + +-------------------------------------------------------------------------------- + +Some source code from Ibis (https://github.com/cloudera/ibis) has been adapted +for PyArrow. Ibis is released under the Apache License, Version 2.0. 
+ +-------------------------------------------------------------------------------- + +dev/tasks/homebrew-formulae/apache-arrow.rb has the following license: + +BSD 2-Clause License + +Copyright (c) 2009-present, Homebrew contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- + +cpp/src/arrow/vendored/base64.cpp has the following license + +ZLIB License + +Copyright (C) 2004-2017 René Nyffenegger + +This source code is provided 'as-is', without any express or implied +warranty. In no event will the author be held liable for any damages arising +from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including +commercial applications, and to alter it and redistribute it freely, subject to +the following restrictions: + +1. The origin of this source code must not be misrepresented; you must not + claim that you wrote the original source code. If you use this source code + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original source code. + +3. This notice may not be removed or altered from any source distribution. + +René Nyffenegger rene.nyffenegger@adp-gmbh.ch + +-------------------------------------------------------------------------------- + +This project includes code from Folly. + + * cpp/src/arrow/vendored/ProducerConsumerQueue.h + +is based on Folly's + + * folly/Portability.h + * folly/lang/Align.h + * folly/ProducerConsumerQueue.h + +Copyright: Copyright (c) Facebook, Inc. and its affiliates. +Home page: https://github.com/facebook/folly +License: http://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +The file cpp/src/arrow/vendored/musl/strptime.c has the following license + +Copyright © 2005-2020 Rich Felker, et al. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +The file cpp/cmake_modules/BuildUtils.cmake contains code from + +https://gist.github.com/cristianadam/ef920342939a89fae3e8a85ca9459b49 + +which is made available under the MIT license + +Copyright (c) 2019 Cristian Adam + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/portable-snippets/ contain code from + +https://github.com/nemequ/portable-snippets + +and have the following copyright notice: + +Each source file contains a preamble explaining the license situation +for that file, which takes priority over this file. With the +exception of some code pulled in from other repositories (such as +µnit, an MIT-licensed project which is used for testing), the code is +public domain, released using the CC0 1.0 Universal dedication (*). + +(*) https://creativecommons.org/publicdomain/zero/1.0/legalcode + +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/fast_float/ contain code from + +https://github.com/lemire/fast_float + +which is made available under the Apache License 2.0. + +-------------------------------------------------------------------------------- + +The file python/pyarrow/vendored/docscrape.py contains code from + +https://github.com/numpy/numpydoc/ + +which is made available under the BSD 2-clause license. 
+ +-------------------------------------------------------------------------------- + +The file python/pyarrow/vendored/version.py contains code from + +https://github.com/pypa/packaging/ + +which is made available under both the Apache license v2.0 and the +BSD 2-clause license. + +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/pcg contain code from + +https://github.com/imneme/pcg-cpp + +and have the following copyright notice: + +Copyright 2014-2019 Melissa O'Neill , + and the PCG Project contributors. + +SPDX-License-Identifier: (Apache-2.0 OR MIT) + +Licensed under the Apache License, Version 2.0 (provided in +LICENSE-APACHE.txt and at http://www.apache.org/licenses/LICENSE-2.0) +or under the MIT license (provided in LICENSE-MIT.txt and at +http://opensource.org/licenses/MIT), at your option. This file may not +be copied, modified, or distributed except according to those terms. + +Distributed on an "AS IS" BASIS, WITHOUT WARRANTY OF ANY KIND, either +express or implied. See your chosen license for details. + +-------------------------------------------------------------------------------- +r/R/dplyr-count-tally.R (some portions) + +Some portions of this file are derived from code from + +https://github.com/tidyverse/dplyr/ + +which is made available under the MIT license + +Copyright (c) 2013-2019 RStudio and others. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the “Software”), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +The file src/arrow/util/io_util.cc contains code from the CPython project +which is made available under the Python Software Foundation License Version 2. + +-------------------------------------------------------------------------------- + +3rdparty dependency opentelemetry-cpp is statically linked in certain binary +distributions. opentelemetry-cpp is made available under the Apache License 2.0. + +Copyright The OpenTelemetry Authors +SPDX-License-Identifier: Apache-2.0 + +-------------------------------------------------------------------------------- + +ci/conan/ is based on code from Conan Package and Dependency Manager. 
+ +Copyright (c) 2019 Conan.io + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +3rdparty dependency UCX is redistributed as a dynamically linked shared +library in certain binary distributions. UCX has the following license: + +Copyright (c) 2014-2015 UT-Battelle, LLC. All rights reserved. +Copyright (C) 2014-2020 Mellanox Technologies Ltd. All rights reserved. +Copyright (C) 2014-2015 The University of Houston System. All rights reserved. +Copyright (C) 2015 The University of Tennessee and The University + of Tennessee Research Foundation. All rights reserved. +Copyright (C) 2016-2020 ARM Ltd. All rights reserved. +Copyright (c) 2016 Los Alamos National Security, LLC. All rights reserved. +Copyright (C) 2016-2020 Advanced Micro Devices, Inc. All rights reserved. +Copyright (C) 2019 UChicago Argonne, LLC. All rights reserved. +Copyright (c) 2018-2020 NVIDIA CORPORATION. All rights reserved. +Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved. +Copyright (C) 2016-2020 Stony Brook University. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holder nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +The file dev/tasks/r/github.packages.yml contains code from + +https://github.com/ursa-labs/arrow-r-nightly + +which is made available under the Apache License 2.0. + +-------------------------------------------------------------------------------- +.github/actions/sync-nightlies/action.yml (some portions) + +Some portions of this file are derived from code from + +https://github.com/JoshPiper/rsync-docker + +which is made available under the MIT license + +Copyright (c) 2020 Joshua Piper + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- +.github/actions/sync-nightlies/action.yml (some portions) + +Some portions of this file are derived from code from + +https://github.com/burnett01/rsync-deployments + +which is made available under the MIT license + +Copyright (c) 2019-2022 Contention +Copyright (c) 2019-2022 Burnett01 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+--------------------------------------------------------------------------------
+java/vector/src/main/java/org/apache/arrow/vector/util/IntObjectHashMap.java
+java/vector/src/main/java/org/apache/arrow/vector/util/IntObjectMap.java
+
+These files are derived from code from Netty, which is made available under the
+Apache License 2.0.
diff --git a/x-pack/plugin/esql/arrow/licenses/arrow-NOTICE.txt b/x-pack/plugin/esql/arrow/licenses/arrow-NOTICE.txt
new file mode 100644
index 0000000000000..2089c6fb20358
--- /dev/null
+++ b/x-pack/plugin/esql/arrow/licenses/arrow-NOTICE.txt
@@ -0,0 +1,84 @@
+Apache Arrow
+Copyright 2016-2024 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+This product includes software from the SFrame project (BSD, 3-clause).
+* Copyright (C) 2015 Dato, Inc.
+* Copyright (c) 2009 Carnegie Mellon University.
+
+This product includes software from the Feather project (Apache 2.0)
+https://github.com/wesm/feather
+
+This product includes software from the DyND project (BSD 2-clause)
+https://github.com/libdynd
+
+This product includes software from the LLVM project
+ * distributed under the University of Illinois Open Source
+
+This product includes software from the google-lint project
+ * Copyright (c) 2009 Google Inc. All rights reserved.
+
+This product includes software from the mman-win32 project
+ * Copyright https://code.google.com/p/mman-win32/
+ * Licensed under the MIT License;
+
+This product includes software from the LevelDB project
+ * Copyright (c) 2011 The LevelDB Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * Moved from Kudu http://github.com/cloudera/kudu
+
+This product includes software from the CMake project
+ * Copyright 2001-2009 Kitware, Inc.
+ * Copyright 2012-2014 Continuum Analytics, Inc.
+ * All rights reserved.
+
+This product includes software from https://github.com/matthew-brett/multibuild (BSD 2-clause)
+ * Copyright (c) 2013-2016, Matt Terry and Matthew Brett; all rights reserved.
+
+This product includes software from the Ibis project (Apache 2.0)
+ * Copyright (c) 2015 Cloudera, Inc.
+ * https://github.com/cloudera/ibis
+
+This product includes software from Dremio (Apache 2.0)
+ * Copyright (C) 2017-2018 Dremio Corporation
+ * https://github.com/dremio/dremio-oss
+
+This product includes software from Google Guava (Apache 2.0)
+ * Copyright (C) 2007 The Guava Authors
+ * https://github.com/google/guava
+
+This product includes software from CMake (BSD 3-Clause)
+ * CMake - Cross Platform Makefile Generator
+ * Copyright 2000-2019 Kitware, Inc. and Contributors
+
+The web site includes files generated by Jekyll.
+
+--------------------------------------------------------------------------------
+
+This product includes code from Apache Kudu, which includes the following in
+its NOTICE file:
+
+  Apache Kudu
+  Copyright 2016 The Apache Software Foundation
+
+  This product includes software developed at
+  The Apache Software Foundation (http://www.apache.org/).
+
+  Portions of this software were developed at
+  Cloudera, Inc (http://www.cloudera.com/).
+ +-------------------------------------------------------------------------------- + +This product includes code from Apache ORC, which includes the following in +its NOTICE file: + + Apache ORC + Copyright 2013-2019 The Apache Software Foundation + + This product includes software developed by The Apache Software + Foundation (http://www.apache.org/). + + This product includes software developed by Hewlett-Packard: + (c) Copyright [2014-2015] Hewlett-Packard Development Company, L.P diff --git a/x-pack/plugin/esql/arrow/licenses/checker-qual-LICENSE.txt b/x-pack/plugin/esql/arrow/licenses/checker-qual-LICENSE.txt new file mode 100644 index 0000000000000..9837c6b69fdab --- /dev/null +++ b/x-pack/plugin/esql/arrow/licenses/checker-qual-LICENSE.txt @@ -0,0 +1,22 @@ +Checker Framework qualifiers +Copyright 2004-present by the Checker Framework developers + +MIT License: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/x-pack/plugin/esql/arrow/licenses/checker-qual-NOTICE.txt b/x-pack/plugin/esql/arrow/licenses/checker-qual-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/esql/arrow/licenses/flatbuffers-java-LICENSE.txt b/x-pack/plugin/esql/arrow/licenses/flatbuffers-java-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/x-pack/plugin/esql/arrow/licenses/flatbuffers-java-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/x-pack/plugin/esql/arrow/licenses/flatbuffers-java-NOTICE.txt b/x-pack/plugin/esql/arrow/licenses/flatbuffers-java-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/esql/arrow/licenses/jackson-LICENSE.txt b/x-pack/plugin/esql/arrow/licenses/jackson-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/x-pack/plugin/esql/arrow/licenses/jackson-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/plugin/esql/arrow/licenses/jackson-NOTICE.txt b/x-pack/plugin/esql/arrow/licenses/jackson-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/esql/arrow/licenses/slf4j-LICENSE.txt b/x-pack/plugin/esql/arrow/licenses/slf4j-LICENSE.txt new file mode 100644 index 0000000000000..f687729a0b452 --- /dev/null +++ b/x-pack/plugin/esql/arrow/licenses/slf4j-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2022 QOS.ch Sarl (Switzerland) +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/x-pack/plugin/esql/arrow/licenses/slf4j-NOTICE.txt b/x-pack/plugin/esql/arrow/licenses/slf4j-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/AllocationManagerShim.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/AllocationManagerShim.java new file mode 100644 index 0000000000000..b52d1053ff595 --- /dev/null +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/AllocationManagerShim.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.arrow;
+
+import org.apache.arrow.memory.AllocationManager;
+import org.apache.arrow.memory.ArrowBuf;
+import org.apache.arrow.memory.BufferAllocator;
+import org.apache.arrow.memory.DefaultAllocationManagerOption;
+import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.logging.LogManager;
+import org.elasticsearch.logging.Logger;
+
+import java.lang.reflect.Field;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+
+/**
+ * An Arrow memory allocation manager that always fails.
+ * <p>
+ * We don't actually use Arrow's memory manager as we stream dataframe buffers directly from ESQL blocks.
+ * But Arrow won't initialize properly unless it has one (and requires either the arrow-memory-netty or arrow-memory-unsafe libraries).
+ * It also does some fancy classpath scanning and calls to {@code setAccessible} which will be rejected by the security manager.
+ * <p>
    + * So we configure an allocation manager that will fail on any attempt to allocate memory. + * + * @see DefaultAllocationManagerOption + */ +public class AllocationManagerShim implements AllocationManager.Factory { + + private static final Logger logger = LogManager.getLogger(AllocationManagerShim.class); + + /** + * Initialize the Arrow memory allocation manager shim. + */ + @SuppressForbidden(reason = "Inject the default Arrow memory allocation manager") + public static void init() { + try { + Class.forName("org.elasticsearch.test.ESTestCase"); + logger.info("We're in tests, not disabling Arrow memory manager so we can use a real runtime for testing"); + } catch (ClassNotFoundException notfound) { + logger.debug("Disabling Arrow's allocation manager"); + AccessController.doPrivileged((PrivilegedAction) () -> { + try { + Field field = DefaultAllocationManagerOption.class.getDeclaredField("DEFAULT_ALLOCATION_MANAGER_FACTORY"); + field.setAccessible(true); + field.set(null, new AllocationManagerShim()); + } catch (Exception e) { + throw new AssertionError("Can't init Arrow", e); + } + return null; + }); + } + } + + @Override + public AllocationManager create(BufferAllocator accountingAllocator, long size) { + throw new UnsupportedOperationException("Arrow memory manager is disabled"); + } + + @Override + public ArrowBuf empty() { + throw new UnsupportedOperationException("Arrow memory manager is disabled"); + } +} diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowFormat.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowFormat.java new file mode 100644 index 0000000000000..762c95cdce3e7 --- /dev/null +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowFormat.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.arrow; + +import org.elasticsearch.xcontent.MediaType; + +import java.util.Map; +import java.util.Set; + +public class ArrowFormat implements MediaType { + public static final ArrowFormat INSTANCE = new ArrowFormat(); + + private static final String FORMAT = "arrow"; + // See https://www.iana.org/assignments/media-types/application/vnd.apache.arrow.stream + public static final String CONTENT_TYPE = "application/vnd.apache.arrow.stream"; + private static final String VENDOR_CONTENT_TYPE = "application/vnd.elasticsearch+arrow+stream"; + + @Override + public String queryParameter() { + return FORMAT; + } + + @Override + public Set headerValues() { + return Set.of( + new HeaderValue(CONTENT_TYPE, Map.of("header", "present|absent")), + new HeaderValue(VENDOR_CONTENT_TYPE, Map.of("header", "present|absent", COMPATIBLE_WITH_PARAMETER_NAME, VERSION_PATTERN)) + ); + } +} diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java new file mode 100644 index 0000000000000..8c2243284a538 --- /dev/null +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ArrowResponse.java @@ -0,0 +1,379 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.arrow;
+
+import org.apache.arrow.memory.ArrowBuf;
+import org.apache.arrow.vector.compression.NoCompressionCodec;
+import org.apache.arrow.vector.ipc.ArrowStreamWriter;
+import org.apache.arrow.vector.ipc.WriteChannel;
+import org.apache.arrow.vector.ipc.message.ArrowFieldNode;
+import org.apache.arrow.vector.ipc.message.ArrowRecordBatch;
+import org.apache.arrow.vector.ipc.message.IpcOption;
+import org.apache.arrow.vector.ipc.message.MessageSerializer;
+import org.apache.arrow.vector.types.Types.MinorType;
+import org.apache.arrow.vector.types.pojo.Field;
+import org.apache.arrow.vector.types.pojo.Schema;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.bytes.ReleasableBytesReference;
+import org.elasticsearch.common.io.stream.BytesStream;
+import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput;
+import org.elasticsearch.common.recycler.Recycler;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.core.Releasable;
+import org.elasticsearch.core.Releasables;
+import org.elasticsearch.rest.ChunkedRestResponseBodyPart;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.WritableByteChannel;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+public class ArrowResponse implements ChunkedRestResponseBodyPart, Releasable {
+
+ public static class Column {
+ private final BlockConverter converter;
+ private final String name;
+
+ public Column(String esqlType, String name) {
+ this.converter = ESQL_CONVERTERS.get(esqlType);
+ if (converter == null) {
+ throw new IllegalArgumentException("ES|QL type [" + esqlType + "] is not supported by the Arrow format");
+ }
+ this.name = name;
+ }
+ }
+
+ private final List<Column> columns;
+ private Iterator<ResponseSegment> segments;
+ private ResponseSegment currentSegment;
+
+ public ArrowResponse(List<Column> columns, List<Page> pages) {
+ this.columns = columns;
+
+ currentSegment = new SchemaResponse(this);
+ List<ResponseSegment> rest = new ArrayList<>(pages.size());
+ for (int p = 0; p < pages.size(); p++) {
+ var page = pages.get(p);
+ rest.add(new PageResponse(this, page));
+ // Multivalued fields are not supported yet.
+ for (int b = 0; b < page.getBlockCount(); b++) {
+ if (page.getBlock(b).mayHaveMultivaluedFields()) {
+ throw new IllegalArgumentException(
+ "ES|QL response field [" + columns.get(b).name + "] is multi-valued. This isn't supported yet by the Arrow format"
+ );
+ }
+ }
+ }
+ rest.add(new EndResponse(this));
+ segments = rest.iterator();
+ }
+
+ @Override
+ public boolean isPartComplete() {
+ return currentSegment == null;
+ }
+
+ @Override
+ public boolean isLastPart() {
+ // Even if sent in chunks, the entirety of the ESQL data is available, so it's a single (chunked) part
+ return true;
+ }
+
+ @Override
+ public void getNextPart(ActionListener<ChunkedRestResponseBodyPart> listener) {
+ listener.onFailure(new IllegalStateException("no continuations available"));
+ }
+
+ @Override
+ public ReleasableBytesReference encodeChunk(int sizeHint, Recycler<BytesRef> recycler) throws IOException {
+ try {
+ return currentSegment.encodeChunk(sizeHint, recycler);
+ } finally {
+ if (currentSegment.isDone()) {
+ currentSegment = segments.hasNext() ?
segments.next() : null; + } + } + } + + @Override + public String getResponseContentTypeString() { + return ArrowFormat.CONTENT_TYPE; + } + + @Override + public void close() { + currentSegment = null; + segments = null; + } + + /** + * An Arrow response is composed of different segments, each being a set of chunks: + * the schema header, the data buffers, and the trailer. + */ + protected abstract static class ResponseSegment { + static { + // Init the Arrow memory manager shim + AllocationManagerShim.init(); + } + + protected final ArrowResponse response; + + ResponseSegment(ArrowResponse response) { + this.response = response; + } + + public final ReleasableBytesReference encodeChunk(int sizeHint, Recycler recycler) throws IOException { + RecyclerBytesStreamOutput output = new RecyclerBytesStreamOutput(recycler); + try { + encodeChunk(sizeHint, output); + BytesReference ref = output.bytes(); + RecyclerBytesStreamOutput closeRef = output; + output = null; + ReleasableBytesReference result = new ReleasableBytesReference(ref, () -> Releasables.closeExpectNoException(closeRef)); + return result; + } finally { + Releasables.closeExpectNoException(output); + } + } + + protected abstract void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws IOException; + + protected abstract boolean isDone(); + + /** + * Adapts a {@link BytesStream} so that Arrow can write to it. + */ + protected static WritableByteChannel arrowOut(BytesStream output) { + return new WritableByteChannel() { + @Override + public int write(ByteBuffer byteBuffer) throws IOException { + if (byteBuffer.hasArray() == false) { + throw new AssertionError("only implemented for array backed buffers"); + } + int length = byteBuffer.remaining(); + output.write(byteBuffer.array(), byteBuffer.arrayOffset() + byteBuffer.position(), length); + byteBuffer.position(byteBuffer.position() + length); + assert byteBuffer.hasRemaining() == false; + return length; + } + + @Override + public boolean isOpen() { + return true; + } + + @Override + public void close() {} + }; + } + } + + /** + * Header part of the Arrow response containing the dataframe schema. + * + * @see IPC Streaming Format + */ + private static class SchemaResponse extends ResponseSegment { + private boolean done = false; + + SchemaResponse(ArrowResponse response) { + super(response); + } + + @Override + public boolean isDone() { + return done; + } + + @Override + protected void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws IOException { + WriteChannel arrowOut = new WriteChannel(arrowOut(out)); + MessageSerializer.serialize(arrowOut, arrowSchema()); + done = true; + } + + private Schema arrowSchema() { + return new Schema(response.columns.stream().map(c -> new Field(c.name, c.converter.arrowFieldType(), List.of())).toList()); + } + } + + /** + * Page response segment: write an ES|QL page as an Arrow RecordBatch + */ + private static class PageResponse extends ResponseSegment { + private final Page page; + private boolean done = false; + + PageResponse(ArrowResponse response, Page page) { + super(response); + this.page = page; + } + + @Override + public boolean isDone() { + return done; + } + + // Writes some data and returns the number of bytes written. 
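+ // The returned length is what lets Arrow compute buffer offsets in the record
+ // batch, without the data having been materialized yet.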
+ interface BufWriter { + long write() throws IOException; + } + + @Override + protected void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws IOException { + // An Arrow record batch consists of: + // - fields metadata, giving the number of items and the number of null values for each field + // - data buffers for each field. The number of buffers for a field depends on its type, e.g.: + // - for primitive types, there's a validity buffer (for nulls) and a value buffer. + // - for strings, there's a validity buffer, an offsets buffer and a data buffer + // See https://arrow.apache.org/docs/format/Columnar.html#recordbatch-message + + // Field metadata + List nodes = new ArrayList<>(page.getBlockCount()); + + // Buffers added to the record batch. They're used to track data size so that Arrow can compute offsets + // but contain no data. Actual writing will be done by the bufWriters. This avoids having to deal with + // Arrow's memory management, and in the future will allow direct write from ESQL block vectors. + List bufs = new ArrayList<>(page.getBlockCount() * 2); + + // Closures that will actually write a Block's data. Maps 1:1 to `bufs`. + List bufWriters = new ArrayList<>(page.getBlockCount() * 2); + + // Give Arrow a WriteChannel that will iterate on `bufWriters` when requested to write a buffer. + WriteChannel arrowOut = new WriteChannel(arrowOut(out)) { + int bufIdx = 0; + long extraPosition = 0; + + @Override + public void write(ArrowBuf buffer) throws IOException { + extraPosition += bufWriters.get(bufIdx++).write(out); + } + + @Override + public long getCurrentPosition() { + return super.getCurrentPosition() + extraPosition; + } + + @Override + public long align() throws IOException { + int trailingByteSize = (int) (getCurrentPosition() % 8); + if (trailingByteSize != 0) { // align on 8 byte boundaries + return writeZeros(8 - trailingByteSize); + } + return 0; + } + }; + + // Create Arrow buffers for each of the blocks in this page + for (int b = 0; b < page.getBlockCount(); b++) { + var converter = response.columns.get(b).converter; + + Block block = page.getBlock(b); + nodes.add(new ArrowFieldNode(block.getPositionCount(), converter.nullValuesCount(block))); + converter.convert(block, bufs, bufWriters); + } + + // Create the batch and serialize it + ArrowRecordBatch batch = new ArrowRecordBatch( + page.getPositionCount(), + nodes, + bufs, + NoCompressionCodec.DEFAULT_BODY_COMPRESSION, + true, // align buffers + false // retain buffers + ); + MessageSerializer.serialize(arrowOut, batch); + + done = true; // one day we should respect sizeHint here. kindness. 
+ } + } + + /** + * Trailer segment: write the Arrow end of stream marker + */ + private static class EndResponse extends ResponseSegment { + private boolean done = false; + + private EndResponse(ArrowResponse response) { + super(response); + } + + @Override + public boolean isDone() { + return done; + } + + @Override + protected void encodeChunk(int sizeHint, RecyclerBytesStreamOutput out) throws IOException { + ArrowStreamWriter.writeEndOfStream(new WriteChannel(arrowOut(out)), IpcOption.DEFAULT); + done = true; + } + } + + /** + * Converters for every ES|QL type + */ + static final Map ESQL_CONVERTERS = Map.ofEntries( + // For reference: + // - EsqlDataTypes: list of ESQL data types (not all are present in outputs) + // - PositionToXContent: conversions for ESQL JSON output + // - EsqlDataTypeConverter: conversions to ESQL datatypes + // Missing: multi-valued values + + buildEntry(new BlockConverter.AsNull("null")), + buildEntry(new BlockConverter.AsNull("unsupported")), + + buildEntry(new BlockConverter.AsBoolean("boolean")), + + buildEntry(new BlockConverter.AsInt32("integer")), + buildEntry(new BlockConverter.AsInt32("counter_integer")), + + buildEntry(new BlockConverter.AsInt64("long")), + // FIXME: counters: are they signed? + buildEntry(new BlockConverter.AsInt64("counter_long")), + buildEntry(new BlockConverter.AsInt64("unsigned_long", MinorType.UINT8)), + + buildEntry(new BlockConverter.AsFloat64("double")), + buildEntry(new BlockConverter.AsFloat64("counter_double")), + + buildEntry(new BlockConverter.AsVarChar("keyword")), + buildEntry(new BlockConverter.AsVarChar("text")), + + // date: array of int64 seconds since epoch + // FIXME: is it signed? + buildEntry(new BlockConverter.AsInt64("date", MinorType.TIMESTAMPMILLI)), + + // ip are represented as 16-byte ipv6 addresses. We shorten mapped ipv4 addresses to 4 bytes. + // Another option would be to use a fixed size binary to avoid the offset array. But with mostly + // ipv4 addresses it would still be twice as big. + buildEntry(new BlockConverter.TransformedBytesRef("ip", MinorType.VARBINARY, ValueConversions::shortenIpV4Addresses)), + + // geo_point: Keep WKB format (JSON converts to WKT) + buildEntry(new BlockConverter.AsVarBinary("geo_point")), + buildEntry(new BlockConverter.AsVarBinary("geo_shape")), + buildEntry(new BlockConverter.AsVarBinary("cartesian_point")), + buildEntry(new BlockConverter.AsVarBinary("cartesian_shape")), + + // version: convert to string + buildEntry(new BlockConverter.TransformedBytesRef("version", MinorType.VARCHAR, ValueConversions::versionToString)), + + // _source: json + // TODO: support also CBOR and SMILE with an additional formatting parameter + buildEntry(new BlockConverter.TransformedBytesRef("_source", MinorType.VARCHAR, ValueConversions::sourceToJson)) + ); + + private static Map.Entry buildEntry(BlockConverter converter) { + return Map.entry(converter.esqlType(), converter); + } +} diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java new file mode 100644 index 0000000000000..0a65792ab8e13 --- /dev/null +++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/BlockConverter.java @@ -0,0 +1,452 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.arrow; + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.RecyclerBytesStreamOutput; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; + +import java.io.IOException; +import java.util.BitSet; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; + +public abstract class BlockConverter { + + private final FieldType fieldType; + private final String esqlType; + + protected BlockConverter(String esqlType, Types.MinorType minorType) { + // Add the exact ESQL type as field metadata + var meta = Map.of("elastic:type", esqlType); + this.fieldType = new FieldType(true, minorType.getType(), null, meta); + this.esqlType = esqlType; + } + + public final String esqlType() { + return this.esqlType; + } + + public final FieldType arrowFieldType() { + return this.fieldType; + } + + // Block.nullValuesCount was more efficient but was removed in https://github.com/elastic/elasticsearch/pull/108916 + protected int nullValuesCount(Block block) { + if (block.mayHaveNulls() == false) { + return 0; + } + + if (block.areAllValuesNull()) { + return block.getPositionCount(); + } + + int count = 0; + for (int i = 0; i < block.getPositionCount(); i++) { + if (block.isNull(i)) { + count++; + } + } + return count; + } + + public interface BufWriter { + long write(RecyclerBytesStreamOutput out) throws IOException; + } + + /** + * Convert a block into Arrow buffers. + * @param block the ESQL block + * @param bufs arrow buffers, used to track sizes + * @param bufWriters buffer writers, that will do the actual work of writing the data + */ + public abstract void convert(Block block, List bufs, List bufWriters); + + /** + * Conversion of Double blocks + */ + public static class AsFloat64 extends BlockConverter { + + public AsFloat64(String esqlType) { + super(esqlType, Types.MinorType.FLOAT8); + } + + @Override + public void convert(Block b, List bufs, List bufWriters) { + DoubleBlock block = (DoubleBlock) b; + + accumulateVectorValidity(bufs, bufWriters, block); + + bufs.add(dummyArrowBuf(vectorLength(block))); + bufWriters.add(out -> { + if (block.areAllValuesNull()) { + return BlockConverter.writeZeroes(out, vectorLength(block)); + } + + // TODO could we "just" get the memory of the array and dump it? 
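+ // Each position becomes an 8-byte little-endian IEEE 754 double, which is the
+ // layout Arrow expects for a FLOAT8 value buffer.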
+ int count = block.getPositionCount(); + for (int i = 0; i < count; i++) { + out.writeDoubleLE(block.getDouble(i)); + } + return vectorLength(block); + }); + } + + private static int vectorLength(DoubleBlock b) { + return Double.BYTES * b.getPositionCount(); + } + } + + /** + * Conversion of Int blocks + */ + public static class AsInt32 extends BlockConverter { + + public AsInt32(String esqlType) { + super(esqlType, Types.MinorType.INT); + } + + @Override + public void convert(Block b, List bufs, List bufWriters) { + IntBlock block = (IntBlock) b; + + accumulateVectorValidity(bufs, bufWriters, block); + + bufs.add(dummyArrowBuf(vectorLength(block))); + bufWriters.add(out -> { + if (block.areAllValuesNull()) { + return BlockConverter.writeZeroes(out, vectorLength(block)); + } + + // TODO could we "just" get the memory of the array and dump it? + int count = block.getPositionCount(); + for (int i = 0; i < count; i++) { + out.writeIntLE(block.getInt(i)); + } + return vectorLength(block); + }); + } + + private static int vectorLength(IntBlock b) { + return Integer.BYTES * b.getPositionCount(); + } + } + + /** + * Conversion of Long blocks + */ + public static class AsInt64 extends BlockConverter { + public AsInt64(String esqlType) { + this(esqlType, Types.MinorType.BIGINT); + } + + protected AsInt64(String esqlType, Types.MinorType minorType) { + super(esqlType, minorType); + } + + @Override + public void convert(Block b, List bufs, List bufWriters) { + LongBlock block = (LongBlock) b; + accumulateVectorValidity(bufs, bufWriters, block); + + bufs.add(dummyArrowBuf(vectorLength(block))); + bufWriters.add(out -> { + if (block.areAllValuesNull()) { + return BlockConverter.writeZeroes(out, vectorLength(block)); + } + + // TODO could we "just" get the memory of the array and dump it? + int count = block.getPositionCount(); + for (int i = 0; i < count; i++) { + out.writeLongLE(block.getLong(i)); + } + return vectorLength(block); + }); + } + + private static int vectorLength(LongBlock b) { + return Long.BYTES * b.getPositionCount(); + } + } + + /** + * Conversion of Boolean blocks + */ + public static class AsBoolean extends BlockConverter { + public AsBoolean(String esqlType) { + super(esqlType, Types.MinorType.BIT); + } + + @Override + public void convert(Block b, List bufs, List bufWriters) { + BooleanBlock block = (BooleanBlock) b; + accumulateVectorValidity(bufs, bufWriters, block); + + bufs.add(dummyArrowBuf(vectorLength(block))); + bufWriters.add(out -> { + int count = block.getPositionCount(); + BitSet bits = new BitSet(); + + // Only set the bits that are true, writeBitSet will take + // care of adding zero bytes if needed. 
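+ // For example, the values [true, false, true] set bits 0 and 2, producing the
+ // single byte 0x05.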
+ if (block.areAllValuesNull() == false) { + for (int i = 0; i < count; i++) { + if (block.getBoolean(i)) { + bits.set(i); + } + } + } + + return BlockConverter.writeBitSet(out, bits, count); + }); + } + + private static int vectorLength(BooleanBlock b) { + return BlockConverter.bitSetLength(b.getPositionCount()); + } + } + + /** + * Conversion of ByteRef blocks + */ + public static class BytesRefConverter extends BlockConverter { + + public BytesRefConverter(String esqlType, Types.MinorType minorType) { + super(esqlType, minorType); + } + + @Override + public void convert(Block b, List bufs, List bufWriters) { + BytesRefBlock block = (BytesRefBlock) b; + + BlockConverter.accumulateVectorValidity(bufs, bufWriters, block); + + // Offsets vector + bufs.add(dummyArrowBuf(offsetVectorLength(block))); + + bufWriters.add(out -> { + if (block.areAllValuesNull()) { + var count = block.getPositionCount() + 1; + for (int i = 0; i < count; i++) { + out.writeIntLE(0); + } + return offsetVectorLength(block); + } + + // TODO could we "just" get the memory of the array and dump it? + BytesRef scratch = new BytesRef(); + int offset = 0; + for (int i = 0; i < block.getPositionCount(); i++) { + out.writeIntLE(offset); + // FIXME: add a ByteRefsVector.getLength(position): there are some cases + // where getBytesRef will allocate, which isn't needed here. + BytesRef v = block.getBytesRef(i, scratch); + + offset += v.length; + } + out.writeIntLE(offset); + return offsetVectorLength(block); + }); + + // Data vector + bufs.add(BlockConverter.dummyArrowBuf(dataVectorLength(block))); + + bufWriters.add(out -> { + if (block.areAllValuesNull()) { + return 0; + } + + // TODO could we "just" get the memory of the array and dump it? + BytesRef scratch = new BytesRef(); + long length = 0; + for (int i = 0; i < block.getPositionCount(); i++) { + BytesRef v = block.getBytesRef(i, scratch); + + out.write(v.bytes, v.offset, v.length); + length += v.length; + } + return length; + }); + } + + private static int offsetVectorLength(BytesRefBlock block) { + return Integer.BYTES * (block.getPositionCount() + 1); + } + + private int dataVectorLength(BytesRefBlock block) { + if (block.areAllValuesNull()) { + return 0; + } + + // TODO we can probably get the length from the vector without all this sum + + int length = 0; + BytesRef scratch = new BytesRef(); + for (int i = 0; i < block.getPositionCount(); i++) { + BytesRef v = block.getBytesRef(i, scratch); + length += v.length; + } + return length; + } + } + + /** + * Conversion of ByteRefs where each value is itself converted to a different format. 
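+ * For example, {@code version} values are decoded from their binary representation into strings.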
+ */
+ public static class TransformedBytesRef extends BytesRefConverter {
+
+ private final BiFunction<BytesRef, BytesRef, BytesRef> valueConverter;
+
+ /**
+ *
+ * @param esqlType ESQL type name
+ * @param minorType Arrow type
+ * @param valueConverter a function that takes (value, scratch) input parameters and returns the transformed value
+ */
+ public TransformedBytesRef(String esqlType, Types.MinorType minorType, BiFunction<BytesRef, BytesRef, BytesRef> valueConverter) {
+ super(esqlType, minorType);
+ this.valueConverter = valueConverter;
+ }
+
+ @Override
+ public void convert(Block b, List<ArrowBuf> bufs, List<BufWriter> bufWriters) {
+ BytesRefBlock block = (BytesRefBlock) b;
+ try (BytesRefBlock transformed = transformValues(block)) {
+ super.convert(transformed, bufs, bufWriters);
+ }
+ }
+
+ /**
+ * Creates a new BytesRefBlock by applying the value converter to each non-null and non-empty value
+ */
+ private BytesRefBlock transformValues(BytesRefBlock block) {
+ try (BytesRefBlock.Builder builder = block.blockFactory().newBytesRefBlockBuilder(block.getPositionCount())) {
+ BytesRef scratch = new BytesRef();
+ for (int i = 0; i < block.getPositionCount(); i++) {
+ if (block.isNull(i)) {
+ builder.appendNull();
+ } else {
+ BytesRef bytes = block.getBytesRef(i, scratch);
+ if (bytes.length != 0) {
+ bytes = valueConverter.apply(bytes, scratch);
+ }
+ builder.appendBytesRef(bytes);
+ }
+ }
+ return builder.build();
+ }
+ }
+ }
+
+ public static class AsVarChar extends BytesRefConverter {
+ public AsVarChar(String esqlType) {
+ super(esqlType, Types.MinorType.VARCHAR);
+ }
+ }
+
+ public static class AsVarBinary extends BytesRefConverter {
+ public AsVarBinary(String esqlType) {
+ super(esqlType, Types.MinorType.VARBINARY);
+ }
+ }
+
+ public static class AsNull extends BlockConverter {
+ public AsNull(String esqlType) {
+ super(esqlType, Types.MinorType.NULL);
+ }
+
+ @Override
+ public void convert(Block block, List<ArrowBuf> bufs, List<BufWriter> bufWriters) {
+ // Null vector in arrow has no associated buffers
+ // See https://arrow.apache.org/docs/format/Columnar.html#null-layout
+ }
+ }
+
+ // Create a dummy ArrowBuf used for size accounting purposes.
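+ // Arrow only reads the writer index (i.e. the size) of these buffers when laying
+ // out the record batch; the actual bytes are produced later by the BufWriters.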
+ private static ArrowBuf dummyArrowBuf(long size) {
+ return new ArrowBuf(null, null, 0, 0).writerIndex(size);
+ }
+
+ // Length in bytes of a validity buffer
+ private static int bitSetLength(int totalValues) {
+ return (totalValues + 7) / 8;
+ }
+
+ private static void accumulateVectorValidity(List<ArrowBuf> bufs, List<BufWriter> bufWriters, Block b) {
+ bufs.add(dummyArrowBuf(bitSetLength(b.getPositionCount())));
+ bufWriters.add(out -> {
+ if (b.mayHaveNulls() == false) {
+ return writeAllTrueValidity(out, b.getPositionCount());
+ } else if (b.areAllValuesNull()) {
+ return writeAllFalseValidity(out, b.getPositionCount());
+ } else {
+ return writeValidities(out, b);
+ }
+ });
+ }
+
+ private static long writeAllTrueValidity(RecyclerBytesStreamOutput out, int valueCount) {
+ int allOnesCount = valueCount / 8;
+ for (int i = 0; i < allOnesCount; i++) {
+ out.writeByte((byte) 0xff);
+ }
+ int remaining = valueCount % 8;
+ if (remaining == 0) {
+ return allOnesCount;
+ }
+ out.writeByte((byte) ((1 << remaining) - 1));
+ return allOnesCount + 1;
+ }
+
+ private static long writeAllFalseValidity(RecyclerBytesStreamOutput out, int valueCount) {
+ int count = bitSetLength(valueCount);
+ for (int i = 0; i < count; i++) {
+ out.writeByte((byte) 0x00);
+ }
+ return count;
+ }
+
+ private static long writeValidities(RecyclerBytesStreamOutput out, Block block) {
+ int valueCount = block.getPositionCount();
+ BitSet bits = new BitSet(valueCount);
+ for (int i = 0; i < block.getPositionCount(); i++) {
+ if (block.isNull(i) == false) {
+ bits.set(i);
+ }
+ }
+ return writeBitSet(out, bits, valueCount);
+ }
+
+ private static long writeBitSet(RecyclerBytesStreamOutput out, BitSet bits, int bitCount) {
+ byte[] bytes = bits.toByteArray();
+ out.writeBytes(bytes, 0, bytes.length);
+
+ // toByteArray will return bytes up to the last bit set. It may therefore
+ // have a length lower than what is needed to actually store bitCount bits.
+ int expectedLength = bitSetLength(bitCount);
+ writeZeroes(out, expectedLength - bytes.length);
+
+ return expectedLength;
+ }
+
+ private static long writeZeroes(RecyclerBytesStreamOutput out, int byteCount) {
+ for (int i = 0; i < byteCount; i++) {
+ out.writeByte((byte) 0);
+ }
+ return byteCount;
+ }
+}
diff --git a/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ValueConversions.java b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ValueConversions.java
new file mode 100644
index 0000000000000..8139380aef1c8
--- /dev/null
+++ b/x-pack/plugin/esql/arrow/src/main/java/org/elasticsearch/xpack/esql/arrow/ValueConversions.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.arrow;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.versionfield.Version;
+
+import java.io.IOException;
+
+/**
+ * Utilities to convert some byte-encoded ESQL values into a format more suitable
+ * for Arrow output.
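+ * For example, ipv6-mapped ipv4 addresses are shortened to 4 bytes and binary-encoded
+ * versions are rendered as strings.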
+ */ +public class ValueConversions { + + /** + * Shorten ipv6-mapped ipv4 IP addresses to 4 bytes + */ + public static BytesRef shortenIpV4Addresses(BytesRef value, BytesRef scratch) { + // Same logic as sun.net.util.IPAddressUtil#isIPv4MappedAddress + // See https://datatracker.ietf.org/doc/html/rfc4291#section-2.5.5.2 + if (value.length == 16) { + int pos = value.offset; + byte[] bytes = value.bytes; + boolean isIpV4 = bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == 0 + && bytes[pos++] == (byte) 0xFF + && bytes[pos] == (byte) 0xFF; + + if (isIpV4) { + scratch.bytes = value.bytes; + scratch.offset = value.offset + 12; + scratch.length = 4; + return scratch; + } + } + return value; + } + + /** + * Convert binary-encoded versions to strings + */ + public static BytesRef versionToString(BytesRef value, BytesRef scratch) { + return new BytesRef(new Version(value).toString()); + } + + /** + * Convert any xcontent source to json + */ + public static BytesRef sourceToJson(BytesRef value, BytesRef scratch) { + try { + var valueArray = new BytesArray(value); + XContentType xContentType = XContentHelper.xContentType(valueArray); + if (xContentType == XContentType.JSON) { + return value; + } else { + String json = XContentHelper.convertToJson(valueArray, false, xContentType); + return new BytesRef(json); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java new file mode 100644 index 0000000000000..cf49b37db2805 --- /dev/null +++ b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ArrowResponseTests.java @@ -0,0 +1,600 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.arrow; + +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.TimeStampMilliVector; +import org.apache.arrow.vector.UInt8Vector; +import org.apache.arrow.vector.ValueVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.ipc.ArrowStreamReader; +import org.apache.arrow.vector.util.VectorSchemaRootAppender; +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.CompositeBytesReference; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVectorBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.BytesRefRecycler; +import org.elasticsearch.xpack.versionfield.Version; +import org.junit.AfterClass; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +public class ArrowResponseTests extends ESTestCase { + + private static final BlockFactory BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("test-noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + private static final RootAllocator ALLOCATOR = new RootAllocator(); + + @AfterClass + public static void afterClass() throws Exception { + ALLOCATOR.close(); + } + + // --------------------------------------------------------------------------------------------- + // Value creation, getters for ESQL and Arrow + + static final ValueType INTEGER_VALUES = new ValueTypeImpl( + factory -> factory.newIntBlockBuilder(0), + block -> block.appendInt(randomInt()), + (block, i, scratch) -> block.getInt(i), + IntVector::get + ); + + static final ValueType LONG_VALUES = new ValueTypeImpl( + factory -> factory.newLongBlockBuilder(0), + block -> block.appendLong(randomLong()), + (block, i, scratch) -> block.getLong(i), + BigIntVector::get + ); + + static final ValueType ULONG_VALUES = new ValueTypeImpl( + factory -> factory.newLongBlockBuilder(0), + block -> block.appendLong(randomLong()), + (block, i, scratch) -> block.getLong(i), + UInt8Vector::get + ); + + static final ValueType DATE_VALUES = new ValueTypeImpl( + factory -> factory.newLongBlockBuilder(0), + block -> block.appendLong(randomLong()), + (block, i, scratch) 
-> block.getLong(i), + TimeStampMilliVector::get + ); + + static final ValueType DOUBLE_VALUES = new ValueTypeImpl( + factory -> factory.newDoubleBlockBuilder(0), + block -> block.appendDouble(randomDouble()), + (block, i, scratch) -> block.getDouble(i), + Float8Vector::get + ); + + static final ValueType BOOLEAN_VALUES = new ValueTypeImpl( + factory -> factory.newBooleanBlockBuilder(0), + block -> block.appendBoolean(randomBoolean()), + (b, i, s) -> b.getBoolean(i), + (v, i) -> v.get(i) != 0 // Arrow's BitVector returns 0 or 1 + ); + + static final ValueType TEXT_VALUES = new ValueTypeImpl( + factory -> factory.newBytesRefBlockBuilder(0), + block -> block.appendBytesRef(new BytesRef("🚀" + randomAlphaOfLengthBetween(1, 20))), + (b, i, s) -> b.getBytesRef(i, s).utf8ToString(), + (v, i) -> new String(v.get(i), StandardCharsets.UTF_8) + ); + + static final ValueType SOURCE_VALUES = new ValueTypeImpl( + factory -> factory.newBytesRefBlockBuilder(0), + // Use a constant value, conversion is tested separately + block -> block.appendBytesRef(new BytesRef("{\"foo\": 42}")), + (b, i, s) -> b.getBytesRef(i, s).utf8ToString(), + (v, i) -> new String(v.get(i), StandardCharsets.UTF_8) + ); + + static final ValueType IP_VALUES = new ValueTypeImpl( + factory -> factory.newBytesRefBlockBuilder(0), + block -> { + byte[] addr = InetAddressPoint.encode(randomIp(randomBoolean())); + assertEquals(16, addr.length); // Make sure all is ipv6-mapped + block.appendBytesRef(new BytesRef(addr)); + }, + (b, i, s) -> ValueConversions.shortenIpV4Addresses(b.getBytesRef(i, s), new BytesRef()), + (v, i) -> new BytesRef(v.get(i)) + ); + + static final ValueType BINARY_VALUES = new ValueTypeImpl( + factory -> factory.newBytesRefBlockBuilder(0), + block -> block.appendBytesRef(new BytesRef(randomByteArrayOfLength(randomIntBetween(1, 100)))), + BytesRefBlock::getBytesRef, + (v, i) -> new BytesRef(v.get(i)) + ); + + static final ValueType VERSION_VALUES = new ValueTypeImpl( + factory -> factory.newBytesRefBlockBuilder(0), + block -> block.appendBytesRef(new Version(between(0, 100) + "." + between(0, 100) + "." + between(0, 100)).toBytesRef()), + (b, i, s) -> new Version(b.getBytesRef(i, s)).toString(), + (v, i) -> new String(v.get(i), StandardCharsets.UTF_8) + ); + + static final ValueType NULL_VALUES = new ValueTypeImpl( + factory -> factory.newBytesRefBlockBuilder(0), + Block.Builder::appendNull, + (b, i, s) -> b.isNull(i) ? null : "non-null in block", + (v, i) -> v.isNull(i) ? 
null : "non-null in vector" + ); + + static final Map VALUE_TYPES = Map.ofEntries( + Map.entry("integer", INTEGER_VALUES), + Map.entry("counter_integer", INTEGER_VALUES), + Map.entry("long", LONG_VALUES), + Map.entry("counter_long", LONG_VALUES), + Map.entry("unsigned_long", ULONG_VALUES), + Map.entry("double", DOUBLE_VALUES), + Map.entry("counter_double", DOUBLE_VALUES), + + Map.entry("text", TEXT_VALUES), + Map.entry("keyword", TEXT_VALUES), + + Map.entry("boolean", BOOLEAN_VALUES), + Map.entry("date", DATE_VALUES), + Map.entry("ip", IP_VALUES), + Map.entry("version", VERSION_VALUES), + Map.entry("_source", SOURCE_VALUES), + + Map.entry("null", NULL_VALUES), + Map.entry("unsupported", NULL_VALUES), + + // All geo types just pass-through WKB, use random binary data + Map.entry("geo_point", BINARY_VALUES), + Map.entry("geo_shape", BINARY_VALUES), + Map.entry("cartesian_point", BINARY_VALUES), + Map.entry("cartesian_shape", BINARY_VALUES) + ); + + // --------------------------------------------------------------------------------------------- + // Tests + + public void testTestHarness() { + TestColumn testColumn = TestColumn.create("foo", "integer"); + TestBlock denseBlock = TestBlock.create(BLOCK_FACTORY, testColumn, Density.Dense, 3); + TestBlock sparseBlock = TestBlock.create(BLOCK_FACTORY, testColumn, Density.Sparse, 5); + TestBlock emptyBlock = TestBlock.create(BLOCK_FACTORY, testColumn, Density.Empty, 7); + + // Test that density works as expected + assertTrue(denseBlock.block instanceof IntVectorBlock); + assertEquals("IntArrayBlock", sparseBlock.block.getClass().getSimpleName()); // non-public class + assertEquals("ConstantNullBlock", emptyBlock.block.getClass().getSimpleName()); + + // Test that values iterator scans all pages + List pages = Stream.of(denseBlock, sparseBlock, emptyBlock).map(b -> new TestPage(List.of(b))).toList(); + TestCase tc = new TestCase(List.of(testColumn), pages); + EsqlValuesIterator valuesIterator = new EsqlValuesIterator(tc, 0); + int count = 0; + while (valuesIterator.hasNext()) { + valuesIterator.next(); + count++; + } + assertEquals(3 + 5 + 7, count); + + // Test that we have value types for all types + List converters = new ArrayList<>(ArrowResponse.ESQL_CONVERTERS.keySet()); + Collections.sort(converters); + List valueTypes = new ArrayList<>(VALUE_TYPES.keySet()); + Collections.sort(valueTypes); + assertEquals("Missing test value types", converters, valueTypes); + } + + /** + * Test single-column for all types with a mix of dense/sparse/empty pages + */ + public void testSingleColumn() throws IOException { + for (var type : VALUE_TYPES.keySet()) { + TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type)); + List pages = new ArrayList<>(); + + for (var density : Density.values()) { + TestBlock testBlock = TestBlock.create(BLOCK_FACTORY, testColumn, density, 10); + TestPage testPage = new TestPage(List.of(testBlock)); + pages.add(testPage); + } + TestCase testCase = new TestCase(List.of(testColumn), pages); + + compareEsqlAndArrow(testCase); + } + } + + public void testSingleBlock() throws IOException { + // Simple test to easily focus on a specific type & density + String type = "text"; + Density density = Density.Dense; + + TestColumn testColumn = new TestColumn("foo", type, VALUE_TYPES.get(type)); + List pages = new ArrayList<>(); + + TestBlock testBlock = TestBlock.create(BLOCK_FACTORY, testColumn, density, 10); + TestPage testPage = new TestPage(List.of(testBlock)); + pages.add(testPage); + + TestCase testCase = new 
TestCase(List.of(testColumn), pages); + + compareEsqlAndArrow(testCase); + } + + /** + * Test that multivalued arrays are rejected + */ + public void testMultivaluedField() throws IOException { + IntBlock.Builder builder = BLOCK_FACTORY.newIntBlockBuilder(0); + builder.appendInt(42); + builder.appendNull(); + builder.beginPositionEntry(); + builder.appendInt(44); + builder.appendInt(45); + builder.endPositionEntry(); + builder.appendInt(46); + IntBlock block = builder.build(); + + // Consistency check + assertTrue(block.mayHaveMultivaluedFields()); + assertEquals(0, block.getFirstValueIndex(0)); + assertEquals(1, block.getValueCount(0)); + + // null values still use one position in the array + assertEquals(0, block.getValueCount(1)); + assertEquals(1, block.getFirstValueIndex(1)); + assertTrue(block.isNull(1)); + assertEquals(0, block.getInt(1)); + + assertEquals(2, block.getFirstValueIndex(2)); + assertEquals(2, block.getValueCount(2)); + assertEquals(2, block.getFirstValueIndex(2)); + assertEquals(45, block.getInt(block.getFirstValueIndex(2) + 1)); + + assertEquals(4, block.getFirstValueIndex(3)); + + var column = TestColumn.create("some-field", "integer"); + TestCase testCase = new TestCase(List.of(column), List.of(new TestPage(List.of(new TestBlock(column, block, Density.Dense))))); + + IllegalArgumentException exc = assertThrows(IllegalArgumentException.class, () -> compareEsqlAndArrow(testCase)); + + assertEquals("ES|QL response field [some-field] is multi-valued. This isn't supported yet by the Arrow format", exc.getMessage()); + + } + + /** + * Test a random set of types/columns/pages/densities + */ + public void testRandomTypesAndSize() throws IOException { + + // Shuffle types to randomize their succession in the Arrow stream + List types = new ArrayList<>(VALUE_TYPES.keySet()); + Collections.shuffle(types, random()); + + List columns = types.stream().map(type -> TestColumn.create("col-" + type, type)).toList(); + + List pages = IntStream + // 1 to 10 pages of random density and 1 to 1000 values + .range(0, randomIntBetween(1, 100)) + .mapToObj(i -> TestPage.create(BLOCK_FACTORY, columns)) + .toList(); + + TestCase testCase = new TestCase(columns, pages); + // System.out.println(testCase); + // for (TestPage page: pages) { + // System.out.println(page); + // } + + compareEsqlAndArrow(testCase); + } + + // --------------------------------------------------------------------------------------------- + // Test harness + + private void compareEsqlAndArrow(TestCase testCase) throws IOException { + try (VectorSchemaRoot arrowVectors = toArrowVectors(testCase)) { + compareEsqlAndArrow(testCase, arrowVectors); + } + } + + private void compareEsqlAndArrow(TestCase testCase, VectorSchemaRoot root) { + for (int i = 0; i < testCase.columns.size(); i++) { + + // Check esql type in the metadata + var metadata = root.getSchema().getFields().get(i).getMetadata(); + assertEquals(testCase.columns.get(i).type, metadata.get("elastic:type")); + + // Check values + var esqlValuesIterator = new EsqlValuesIterator(testCase, i); + var arrowValuesIterator = new ArrowValuesIterator(testCase, root, i); + + while (esqlValuesIterator.hasNext() && arrowValuesIterator.hasNext()) { + assertEquals(esqlValuesIterator.next(), arrowValuesIterator.next()); + } + + // Make sure we entirely consumed both sides. 
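+ // If either iterator still has values here, the ES|QL and Arrow sides disagree
+ // on the number of positions in this column.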
+ assertFalse(esqlValuesIterator.hasNext()); + assertFalse(arrowValuesIterator.hasNext()); + } + } + + private VectorSchemaRoot toArrowVectors(TestCase testCase) throws IOException { + ArrowResponse response = new ArrowResponse( + testCase.columns.stream().map(c -> new ArrowResponse.Column(c.type, c.name)).toList(), + testCase.pages.stream().map(p -> new Page(p.blocks.stream().map(b -> b.block).toArray(Block[]::new))).toList() + ); + + assertEquals("application/vnd.apache.arrow.stream", response.getResponseContentTypeString()); + + BytesReference bytes = serializeBlocksDirectly(response); + try ( + ArrowStreamReader reader = new ArrowStreamReader(bytes.streamInput(), ALLOCATOR); + VectorSchemaRoot readerRoot = reader.getVectorSchemaRoot(); + ) { + VectorSchemaRoot root = VectorSchemaRoot.create(readerRoot.getSchema(), ALLOCATOR); + root.allocateNew(); + + while (reader.loadNextBatch()) { + VectorSchemaRootAppender.append(root, readerRoot); + } + + return root; + } + } + + /** + * An iterator over values of a column across all pages. + */ + static class EsqlValuesIterator implements Iterator { + private final int fieldPos; + private final ValueType type; + private final BytesRef scratch = new BytesRef(); + private final Iterator pages; + + private TestPage page; + private int position; + + EsqlValuesIterator(TestCase testCase, int column) { + this.fieldPos = column; + this.type = testCase.columns.get(column).valueType; + this.position = 0; + this.pages = testCase.pages.iterator(); + this.page = pages.next(); + } + + @Override + public boolean hasNext() { + return page != null; + } + + @Override + public Object next() { + if (page == null) { + throw new NoSuchElementException(); + } + Block block = page.blocks.get(fieldPos).block; + Object result = block.isNull(position) ? null : type.valueAt(block, position, scratch); + position++; + if (position >= block.getPositionCount()) { + position = 0; + page = pages.hasNext() ? pages.next() : null; + } + return result; + } + } + + static class ArrowValuesIterator implements Iterator { + private final ValueType type; + private ValueVector vector; + private int position; + + ArrowValuesIterator(TestCase testCase, VectorSchemaRoot root, int column) { + this(root.getVector(column), testCase.columns.get(column).valueType); + } + + ArrowValuesIterator(ValueVector vector, ValueType type) { + this.vector = vector; + this.type = type; + } + + @Override + public boolean hasNext() { + return vector != null; + } + + @Override + public Object next() { + if (vector == null) { + throw new NoSuchElementException(); + } + Object result = vector.isNull(position) ? null : type.valueAt(vector, position); + position++; + if (position >= vector.getValueCount()) { + vector = null; + } + return result; + } + } + + private BytesReference serializeBlocksDirectly(ArrowResponse body) throws IOException { + // Ensure there's a single part, this will fail if we ever change it. 
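+        // The small 1500-byte budget passed to encodeChunk below forces each Arrow
+        // batch to be split across many chunks, exercising the same chunked-transfer
+        // path the REST layer uses when streaming the response.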
+        assertTrue(body.isLastPart());
+
+        List<BytesReference> ourEncoding = new ArrayList<>();
+        while (body.isPartComplete() == false) {
+            ourEncoding.add(body.encodeChunk(1500, BytesRefRecycler.NON_RECYCLING_INSTANCE));
+        }
+        return CompositeBytesReference.of(ourEncoding.toArray(BytesReference[]::new));
+    }
+
+    record TestCase(List<TestColumn> columns, List<TestPage> pages) {
+        @Override
+        public String toString() {
+            return pages.size() + " pages of " + columns.stream().map(TestColumn::type).collect(Collectors.joining("|"));
+        }
+    }
+
+    record TestColumn(String name, String type, ValueType valueType) {
+        static TestColumn create(String name, String type) {
+            return new TestColumn(name, type, VALUE_TYPES.get(type));
+        }
+    }
+
+    record TestPage(List<TestBlock> blocks) {
+
+        static TestPage create(BlockFactory factory, List<TestColumn> columns) {
+            int size = randomIntBetween(1, 1000);
+            return new TestPage(columns.stream().map(column -> TestBlock.create(factory, column, size)).toList());
+        }
+
+        @Override
+        public String toString() {
+            return blocks.get(0).block.getPositionCount()
+                + " items - "
+                + blocks.stream().map(b -> b.density.toString()).collect(Collectors.joining("|"));
+        }
+    }
+
+    record TestBlock(TestColumn column, Block block, Density density) {
+
+        static TestBlock create(BlockFactory factory, TestColumn column, int positions) {
+            return create(factory, column, randomFrom(Density.values()), positions);
+        }
+
+        static TestBlock create(BlockFactory factory, TestColumn column, Density density, int positions) {
+            ValueType valueType = column.valueType();
+            Block block;
+            if (density == Density.Empty) {
+                block = factory.newConstantNullBlock(positions);
+            } else {
+                Block.Builder builder = valueType.createBlockBuilder(factory);
+                int start = 0;
+                if (density == Density.Sparse && positions >= 2) {
+                    // Make sure it's really sparse even if randomness of values may decide otherwise
+                    valueType.addValue(builder, Density.Dense);
+                    valueType.addValue(builder, Density.Empty);
+                    start = 2;
+                }
+                for (int i = start; i < positions; i++) {
+                    valueType.addValue(builder, density);
+                }
+                // Will create an ArrayBlock if there are null values, VectorBlock otherwise
+                block = builder.build();
+            }
+            return new TestBlock(column, block, density);
+        }
+    }
+
+    public enum Density {
+        Empty,
+        Sparse,
+        Dense;
+
+        boolean nextIsNull() {
+            return switch (this) {
+                case Empty -> true;
+                case Sparse -> randomBoolean();
+                case Dense -> false;
+            };
+        }
+    }
+
+    interface ValueType {
+        Block.Builder createBlockBuilder(BlockFactory factory);
+
+        void addValue(Block.Builder builder, Density density);
+
+        Object valueAt(Block block, int position, BytesRef scratch);
+
+        Object valueAt(ValueVector arrowVec, int position);
+    }
+
+    public static class ValueTypeImpl<BlockT extends Block, BlockBT extends Block.Builder, VectorT extends ValueVector>
+        implements
+        ValueType {
+        private final Function<BlockFactory, Block.Builder> builderCreator;
+        private final Consumer<BlockBT> valueAdder;
+        private final TriFunction<BlockT, Integer, BytesRef, Object> blockGetter;
+        private final BiFunction<VectorT, Integer, Object> vectorGetter;
+
+        public ValueTypeImpl(
+            Function<BlockFactory, Block.Builder> builderCreator,
+            Consumer<BlockBT> valueAdder,
+            TriFunction<BlockT, Integer, BytesRef, Object> blockGetter,
+            BiFunction<VectorT, Integer, Object> vectorGetter
+        ) {
+            this.builderCreator = builderCreator;
+            this.valueAdder = valueAdder;
+            this.blockGetter = blockGetter;
+            this.vectorGetter = vectorGetter;
+        }
+
+        @Override
+        public Block.Builder createBlockBuilder(BlockFactory factory) {
+            return builderCreator.apply(factory);
+        }
+
+        @Override
+        @SuppressWarnings("unchecked")
+        public void addValue(Block.Builder builder, Density density) {
+            if (density.nextIsNull()) {
+                builder.appendNull();
+            } else {
+                valueAdder.accept((BlockBT)
builder); + } + } + + @Override + @SuppressWarnings("unchecked") + public Object valueAt(Block block, int position, BytesRef scratch) { + return blockGetter.apply((BlockT) block, position, scratch); + } + + @Override + @SuppressWarnings("unchecked") + public Object valueAt(ValueVector arrowVec, int position) { + return vectorGetter.apply((VectorT) arrowVec, position); + } + } +} diff --git a/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ValueConversionsTests.java b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ValueConversionsTests.java new file mode 100644 index 0000000000000..e700bbd6a3eb5 --- /dev/null +++ b/x-pack/plugin/esql/arrow/src/test/java/org/elasticsearch/xpack/esql/arrow/ValueConversionsTests.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.arrow; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.esql.core.util.StringUtils; +import org.elasticsearch.xpack.versionfield.Version; + +public class ValueConversionsTests extends ESTestCase { + + public void testIpConversion() throws Exception { + { + // ipv6 address + BytesRef bytes = StringUtils.parseIP("2a00:1450:4007:818::200e"); + assertArrayEquals( + new byte[] { 0x2a, 0x00, 0x14, 0x50, 0x40, 0x07, 0x08, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x0e }, + bytes.bytes + ); + + BytesRef scratch = new BytesRef(); + BytesRef bytes2 = ValueConversions.shortenIpV4Addresses(bytes.clone(), scratch); + assertEquals(bytes, bytes2); + } + { + // ipv6 mapped ipv4 address + BytesRef bytes = StringUtils.parseIP("216.58.214.174"); + assertArrayEquals( + new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (byte) 0xFF, (byte) 0xFF, (byte) 216, (byte) 58, (byte) 214, (byte) 174 }, + bytes.bytes + ); + + BytesRef scratch = new BytesRef(); + BytesRef bytes2 = ValueConversions.shortenIpV4Addresses(bytes.clone(), scratch); + + assertTrue(new BytesRef(new byte[] { (byte) 216, (byte) 58, (byte) 214, (byte) 174 }).bytesEquals(bytes2)); + + } + } + + public void testVersionConversion() { + String version = "1.2.3-alpha"; + + BytesRef bytes = new Version("1.2.3-alpha").toBytesRef(); + + BytesRef scratch = new BytesRef(); + BytesRef bytes2 = ValueConversions.versionToString(bytes, scratch); + + // Some conversion happened + assertNotEquals(bytes.length, bytes2.length); + assertEquals(version, bytes2.utf8ToString()); + } + + public void testSourceToJson() throws Exception { + BytesRef bytes = new BytesRef("{\"foo\": 42}"); + + BytesRef scratch = new BytesRef(); + BytesRef bytes2 = ValueConversions.sourceToJson(bytes, scratch); + // No change, even indentation + assertEquals("{\"foo\": 42}", bytes2.utf8ToString()); + } + + public void testCborSourceToJson() throws Exception { + XContentBuilder builder = XContentFactory.cborBuilder(); + builder.startObject(); + builder.field("foo", 42); + builder.endObject(); + builder.close(); + BytesRef bytesRef = BytesReference.bytes(builder).toBytesRef(); + + BytesRef scratch = new BytesRef(); + BytesRef bytes2 = ValueConversions.sourceToJson(bytesRef, scratch); + // Converted to 
JSON + assertEquals("{\"foo\":42}", bytes2.utf8ToString()); + } +} diff --git a/x-pack/plugin/esql/arrow/src/test/resources/plugin-security.policy b/x-pack/plugin/esql/arrow/src/test/resources/plugin-security.policy new file mode 100644 index 0000000000000..c5da65410d3da --- /dev/null +++ b/x-pack/plugin/esql/arrow/src/test/resources/plugin-security.policy @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +// Needed by the Arrow memory manager +grant { + permission java.lang.RuntimePermission "accessDeclaredMembers"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; +}; diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index efe274512c886..c213afae8b01c 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -25,6 +25,8 @@ dependencies { implementation project('compute:ann') implementation project(':libs:elasticsearch-dissect') implementation project(':libs:elasticsearch-grok') + implementation project('arrow') + // Also contains a dummy processor to allow compilation with unused annotations. annotationProcessor project('compute:gen') diff --git a/x-pack/plugin/esql/qa/server/single-node/build.gradle b/x-pack/plugin/esql/qa/server/single-node/build.gradle index 10366a500a532..865d7cf5f5e6c 100644 --- a/x-pack/plugin/esql/qa/server/single-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/single-node/build.gradle @@ -7,6 +7,19 @@ dependencies { javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) javaRestTestImplementation project(xpackModule('esql:qa:server')) yamlRestTestImplementation project(xpackModule('esql:qa:server')) + + javaRestTestImplementation('org.apache.arrow:arrow-vector:16.1.0') + javaRestTestImplementation('org.apache.arrow:arrow-format:16.1.0') + javaRestTestImplementation('org.apache.arrow:arrow-memory-core:16.1.0') + javaRestTestImplementation('org.checkerframework:checker-qual:3.42.0') + javaRestTestImplementation('com.google.flatbuffers:flatbuffers-java:23.5.26') + javaRestTestImplementation("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") + javaRestTestImplementation("com.fasterxml.jackson.core:jackson-core:${versions.jackson}") + javaRestTestImplementation("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}") + javaRestTestImplementation("org.slf4j:slf4j-api:${versions.slf4j}") + javaRestTestImplementation("org.slf4j:slf4j-nop:${versions.slf4j}") + javaRestTestImplementation('org.apache.arrow:arrow-memory-unsafe:16.1.0') + dependencies { clusterPlugins project(':plugins:mapper-size') clusterPlugins project(':plugins:mapper-murmur3') @@ -25,6 +38,7 @@ restResources { tasks.named('javaRestTest') { usesDefaultDistribution() maxParallelForks = 1 + jvmArgs('--add-opens=java.base/java.nio=ALL-UNNAMED') } tasks.named('yamlRestTest') { diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/ArrowFormatIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/ArrowFormatIT.java new file mode 100644 index 0000000000000..20d04977d21f3 --- /dev/null +++ 
b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/ArrowFormatIT.java @@ -0,0 +1,242 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.ipc.ArrowStreamReader; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.util.VectorSchemaRootAppender; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.test.TestClustersThreadFilter; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; + +@ThreadLeakFilters(filters = TestClustersThreadFilter.class) +public class ArrowFormatIT extends ESRestTestCase { + + private static final RootAllocator ALLOCATOR = new RootAllocator(); + + @AfterClass + public static void afterClass() { + ALLOCATOR.close(); + } + + @ClassRule + public static ElasticsearchCluster cluster = Clusters.testCluster(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Before + @After + public void assertRequestBreakerEmpty() throws Exception { + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } + + @Before + public void initIndex() throws IOException { + Request request = new Request("PUT", "/arrow-test"); + request.setJsonEntity(""" + { + "mappings": { + "properties": { + "value": { + "type": "integer" + }, + "description": { + "type": "keyword" + }, + "ip": { + "type": "ip" + }, + "v": { + "type": "version" + } + } + } + } + """); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + request = new Request("POST", "/_bulk?index=arrow-test&refresh=true"); + // 4 documents with a null in the middle, leading to 3 ESQL pages and 3 Arrow batches + request.setJsonEntity(""" + {"index": {"_id": "1"}} + {"value": 1, "ip": "192.168.0.1", "v": "1.0.1", "description": "number one"} + {"index": {"_id": "2"}} + {"value": 2, "ip": "192.168.0.2", "v": "1.0.2", "description": "number two"} + {"index": {"_id": "3"}} + {"value": 3, "ip": "2001:db8::1:0:0:1"} + {"index": {"_id": "4"}} + {"value": 4, "ip": "::afff:4567:890a", "v": "1.0.4", "description": "number four"} + """); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + } + + private VectorSchemaRoot esql(String query) throws IOException { + Request request = new Request("POST", "/_query?format=arrow"); + request.setJsonEntity(query); + Response response = client().performRequest(request); + + assertEquals("application/vnd.apache.arrow.stream", response.getEntity().getContentType().getValue()); + return readArrow(response.getEntity().getContent()); + } + + public void 
testInteger() throws Exception { + try (VectorSchemaRoot root = esql(""" + { + "query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP value" + }""")) { + List fields = root.getSchema().getFields(); + assertEquals(1, fields.size()); + + assertValues(root); + } + } + + public void testString() throws Exception { + try (VectorSchemaRoot root = esql(""" + { + "query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP description" + }""")) { + List fields = root.getSchema().getFields(); + assertEquals(1, fields.size()); + + assertDescription(root); + } + } + + public void testIp() throws Exception { + try (VectorSchemaRoot root = esql(""" + { + "query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP ip" + }""")) { + List fields = root.getSchema().getFields(); + assertEquals(1, fields.size()); + + assertIp(root); + } + } + + public void testVersion() throws Exception { + try (VectorSchemaRoot root = esql(""" + { + "query": "FROM arrow-test | SORT value | LIMIT 100 | KEEP v" + }""")) { + List fields = root.getSchema().getFields(); + assertEquals(1, fields.size()); + + assertVersion(root); + } + } + + public void testEverything() throws Exception { + try (VectorSchemaRoot root = esql(""" + { + "query": "FROM arrow-test | SORT value | LIMIT 100" + }""")) { + List fields = root.getSchema().getFields(); + assertEquals(4, fields.size()); + + assertDescription(root); + assertValues(root); + assertIp(root); + assertVersion(root); + } + } + + private VectorSchemaRoot readArrow(InputStream input) throws IOException { + try ( + ArrowStreamReader reader = new ArrowStreamReader(input, ALLOCATOR); + VectorSchemaRoot readerRoot = reader.getVectorSchemaRoot(); + ) { + VectorSchemaRoot root = VectorSchemaRoot.create(readerRoot.getSchema(), ALLOCATOR); + root.allocateNew(); + + while (reader.loadNextBatch()) { + VectorSchemaRootAppender.append(root, readerRoot); + } + + return root; + } + } + + private void assertValues(VectorSchemaRoot root) { + var valueVector = (IntVector) root.getVector("value"); + assertEquals(1, valueVector.get(0)); + assertEquals(2, valueVector.get(1)); + assertEquals(3, valueVector.get(2)); + assertEquals(4, valueVector.get(3)); + } + + private void assertDescription(VectorSchemaRoot root) { + var descVector = (VarCharVector) root.getVector("description"); + assertEquals("number one", descVector.getObject(0).toString()); + assertEquals("number two", descVector.getObject(1).toString()); + assertTrue(descVector.isNull(2)); + assertEquals("number four", descVector.getObject(3).toString()); + } + + private void assertIp(VectorSchemaRoot root) { + // Test data that has been transformed during output (ipV4 truncated to 32bits) + var ipVector = (VarBinaryVector) root.getVector("ip"); + assertArrayEquals(new byte[] { (byte) 192, (byte) 168, 0, 1 }, ipVector.getObject(0)); + assertArrayEquals(new byte[] { (byte) 192, (byte) 168, 0, 2 }, ipVector.getObject(1)); + assertArrayEquals( + new byte[] { 0x20, 0x01, 0x0d, (byte) 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01 }, + ipVector.getObject(2) + ); + assertArrayEquals( + new byte[] { + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + (byte) 0xaf, + (byte) 0xff, + 0x45, + 0x67, + (byte) 0x89, + 0x0A }, + ipVector.getObject(3) + ); + } + + private void assertVersion(VectorSchemaRoot root) { + // Version is binary-encoded in ESQL vectors, turned into a string in arrow output + var versionVector = (VarCharVector) root.getVector("v"); + assertEquals("1.0.1", 
versionVector.getObject(0).toString()); + assertEquals("1.0.2", versionVector.getObject(1).toString()); + assertTrue(versionVector.isNull(2)); + assertEquals("1.0.4", versionVector.getObject(3).toString()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java index 0ed77b624f5b0..3e3f65daeeec5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -19,6 +20,8 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; import org.elasticsearch.xcontent.MediaType; +import org.elasticsearch.xpack.esql.arrow.ArrowFormat; +import org.elasticsearch.xpack.esql.arrow.ArrowResponse; import org.elasticsearch.xpack.esql.formatter.TextFormat; import org.elasticsearch.xpack.esql.plugin.EsqlMediaTypeParser; @@ -135,6 +138,13 @@ private RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws IOExce ChunkedRestResponseBodyPart.fromTextChunks(format.contentType(restRequest), format.format(restRequest, esqlResponse)), releasable ); + } else if (mediaType == ArrowFormat.INSTANCE) { + ArrowResponse arrowResponse = new ArrowResponse( + // Map here to avoid cyclic dependencies between the arrow subproject and its parent + esqlResponse.columns().stream().map(c -> new ArrowResponse.Column(c.outputType(), c.name())).toList(), + esqlResponse.pages() + ); + restResponse = RestResponse.chunked(RestStatus.OK, arrowResponse, Releasables.wrap(arrowResponse, releasable)); } else { restResponse = RestResponse.chunked( RestStatus.OK, @@ -179,4 +189,5 @@ public ActionListener wrapWithLogging() { listener.onFailure(ex); }); } + } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java index 9f522858358fc..915efe9302a92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlMediaTypeParser.java @@ -13,6 +13,7 @@ import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; +import org.elasticsearch.xpack.esql.arrow.ArrowFormat; import org.elasticsearch.xpack.esql.formatter.TextFormat; import java.util.Arrays; @@ -23,7 +24,7 @@ public class EsqlMediaTypeParser { public static final MediaTypeRegistry MEDIA_TYPE_REGISTRY = new MediaTypeRegistry<>().register( XContentType.values() - ).register(TextFormat.values()); + ).register(TextFormat.values()).register(new MediaType[] { ArrowFormat.INSTANCE }); /* * Since we support {@link TextFormat} and diff --git a/x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.policy index e69de29bb2d1d..22884437add88 100644 --- a/x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.policy +++ 
b/x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +grant codeBase "${codebase.arrow}" { + // Needed for AllocationManagerShim + permission java.lang.RuntimePermission "accessDeclaredMembers"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; +}; From 89cd966b247148372aed73c3586b0754d6e176ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Wed, 3 Jul 2024 11:04:53 +0200 Subject: [PATCH 145/216] Add bulk delete roles API (#110383) * Add bulk delete roles API --- docs/build.gradle | 14 ++ docs/changelog/110383.yaml | 5 + docs/reference/rest-api/security.asciidoc | 2 + .../security/bulk-delete-roles.asciidoc | 120 ++++++++++++++ .../api/security.bulk_delete_role.json | 43 +++++ .../core/security/action/ActionTypes.java | 5 +- .../action/role/BulkDeleteRolesRequest.java | 59 +++++++ .../role/BulkPutRoleRequestBuilder.java | 2 +- ...esResponse.java => BulkRolesResponse.java} | 8 +- .../xpack/security/operator/Constants.java | 1 + .../SecurityOnTrialLicenseRestTestCase.java | 14 ++ .../security/role/BulkDeleteRoleRestIT.java | 112 +++++++++++++ .../security/role/BulkPutRoleRestIT.java | 18 --- .../xpack/security/Security.java | 4 + .../role/TransportBulkDeleteRolesAction.java | 34 ++++ .../role/TransportBulkPutRolesAction.java | 7 +- .../authz/store/NativeRolesStore.java | 149 ++++++++++++++---- .../role/RestBulkDeleteRolesAction.java | 62 ++++++++ .../authz/store/NativeRolesStoreTests.java | 41 ++++- .../test/roles/60_bulk_roles.yml | 19 +-- 20 files changed, 647 insertions(+), 72 deletions(-) create mode 100644 docs/changelog/110383.yaml create mode 100644 docs/reference/rest-api/security/bulk-delete-roles.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkDeleteRolesRequest.java rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/{BulkPutRolesResponse.java => BulkRolesResponse.java} (94%) create mode 100644 x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkDeleteRoleRestIT.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkDeleteRolesAction.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java diff --git a/docs/build.gradle b/docs/build.gradle index e5b8f8d8622ce..99453b840b0d2 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -1815,6 +1815,20 @@ setups['setup-snapshots'] = setups['setup-repository'] + ''' "run_as": [ "other_user" ], "metadata" : {"version": 1} } +''' + setups['user_role'] = ''' + - do: + security.put_role: + name: "my_user_role" + body: > + { + "description": "Grants user access to some indicies.", + "indices": [ + {"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}} + ], + "metadata" : {"version": 1} + } + ''' setups['jacknich_user'] = ''' - do: diff --git a/docs/changelog/110383.yaml b/docs/changelog/110383.yaml new file mode 
100644 index 0000000000000..5e9bddd4bfcd2 --- /dev/null +++ b/docs/changelog/110383.yaml @@ -0,0 +1,5 @@ +pr: 110383 +summary: Add bulk delete roles API +area: Security +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/security.asciidoc b/docs/reference/rest-api/security.asciidoc index 80734ca51b989..04cd838c45600 100644 --- a/docs/reference/rest-api/security.asciidoc +++ b/docs/reference/rest-api/security.asciidoc @@ -48,6 +48,7 @@ Use the following APIs to add, remove, update, and retrieve roles in the native * <> * <> * <> +* <> * <> [discrete] @@ -173,6 +174,7 @@ include::security/put-app-privileges.asciidoc[] include::security/create-role-mappings.asciidoc[] include::security/create-roles.asciidoc[] include::security/bulk-create-roles.asciidoc[] +include::security/bulk-delete-roles.asciidoc[] include::security/create-users.asciidoc[] include::security/create-service-token.asciidoc[] include::security/delegate-pki-authentication.asciidoc[] diff --git a/docs/reference/rest-api/security/bulk-delete-roles.asciidoc b/docs/reference/rest-api/security/bulk-delete-roles.asciidoc new file mode 100644 index 0000000000000..a782b5e37fcb9 --- /dev/null +++ b/docs/reference/rest-api/security/bulk-delete-roles.asciidoc @@ -0,0 +1,120 @@ +[role="xpack"] +[[security-api-bulk-delete-role]] +=== Bulk delete roles API +preview::[] +++++ +Bulk delete roles API +++++ + +Bulk deletes roles in the native realm. + +[[security-api-bulk-delete-role-request]] +==== {api-request-title} + +`DELETE /_security/role/` + +[[security-api-bulk-delete-role-prereqs]] +==== {api-prereq-title} + +* To use this API, you must have at least the `manage_security` cluster +privilege. + +[[security-api-bulk-delete-role-desc]] +==== {api-description-title} + +The role management APIs are generally the preferred way to manage roles, rather than using +<>. The bulk delete roles API cannot delete +roles that are defined in roles files. + +[[security-api-bulk-delete-role-path-params]] +==== {api-path-parms-title} + +`refresh`:: +Optional setting of the {ref}/docs-refresh.html[refresh policy] for the write request. Defaults to Immediate. + +[[security-api-bulk-delete-role-request-body]] +==== {api-request-body-title} + +The following parameters can be specified in the body of a DELETE request +and pertain to deleting a set of roles: + +`names`:: +(list) A list of role names to delete. 
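For orientation, here is a minimal sketch of driving this endpoint from Java with the low-level REST client; the client setup, role names, and helper name are illustrative assumptions, not part of the patch:

[source,java]
----
import java.io.IOException;

import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public final class BulkDeleteRolesExample {
    // Assumes an already-configured low-level RestClient pointing at the cluster.
    static String bulkDeleteRoles(RestClient client) throws IOException {
        Request request = new Request("DELETE", "/_security/role");
        // Same semantics as the `refresh` parameter documented above.
        request.addParameter("refresh", "wait_for");
        request.setJsonEntity("{\"names\": [\"my_admin_role\", \"my_user_role\"]}");
        Response response = client.performRequest(request);
        // The response body buckets results into deleted / not_found / errors.
        return EntityUtils.toString(response.getEntity());
    }
}
----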
+
+[[security-bulk-api-delete-role-example]]
+==== {api-examples-title}
+The following example deletes the `my_admin_role` and `my_user_role` roles:
+
+[source,console]
+--------------------------------------------------
+DELETE /_security/role
+{
+  "names": ["my_admin_role", "my_user_role"]
+}
+--------------------------------------------------
+// TEST[setup:admin_role,user_role]
+
+If the roles are successfully deleted, the request returns:
+
+[source,console-result]
+--------------------------------------------------
+{
+  "deleted": [
+    "my_admin_role",
+    "my_user_role"
+  ]
+}
+--------------------------------------------------
+
+If a role cannot be found, it is listed under `not_found`:
+
+[source,console]
+--------------------------------------------------
+DELETE /_security/role
+{
+  "names": ["my_admin_role", "not_an_existing_role"]
+}
+--------------------------------------------------
+// TEST[setup:admin_role]
+
+[source,console-result]
+--------------------------------------------------
+{
+  "deleted": [
+    "my_admin_role"
+  ],
+  "not_found": [
+    "not_an_existing_role"
+  ]
+}
+--------------------------------------------------
+
+If a request fails or is invalid, the errors are grouped under `errors`:
+
+[source,console]
+--------------------------------------------------
+DELETE /_security/role
+{
+  "names": ["my_admin_role", "superuser"]
+}
+--------------------------------------------------
+// TEST[setup:admin_role]
+
+
+[source,console-result]
+--------------------------------------------------
+{
+  "deleted": [
+    "my_admin_role"
+  ],
+  "errors": {
+    "count": 1,
+    "details": {
+      "superuser": {
+        "type": "illegal_argument_exception",
+        "reason": "role [superuser] is reserved and cannot be deleted"
+      }
+    }
+  }
+}
+--------------------------------------------------
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json
new file mode 100644
index 0000000000000..8810602aa2c18
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.bulk_delete_role.json
@@ -0,0 +1,43 @@
+{
+  "security.bulk_delete_role": {
+    "documentation": {
+      "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-bulk-delete-role.html",
+      "description": "Bulk delete roles in the native realm."
+    },
+    "stability": "stable",
+    "visibility": "public",
+    "headers": {
+      "accept": [
+        "application/json"
+      ],
+      "content_type": [
+        "application/json"
+      ]
+    },
+    "url": {
+      "paths": [
+        {
+          "path": "/_security/role",
+          "methods": [
+            "DELETE"
+          ]
+        }
+      ]
+    },
+    "params": {
+      "refresh": {
+        "type": "enum",
+        "options": [
+          "true",
+          "false",
+          "wait_for"
+        ],
+        "description": "If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes."
+ } + }, + "body": { + "description": "The roles to delete", + "required": true + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java index 5406ecb105d0e..52f8c7cf456d9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse; +import org.elasticsearch.xpack.core.security.action.role.BulkRolesResponse; import org.elasticsearch.xpack.core.security.action.role.QueryRoleResponse; import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; @@ -25,6 +25,7 @@ public final class ActionTypes { ); public static final ActionType QUERY_USER_ACTION = new ActionType<>("cluster:admin/xpack/security/user/query"); + public static final ActionType BULK_PUT_ROLES = new ActionType<>("cluster:admin/xpack/security/role/bulk_put"); public static final ActionType QUERY_ROLE_ACTION = new ActionType<>("cluster:admin/xpack/security/role/query"); - public static final ActionType BULK_PUT_ROLES = new ActionType<>("cluster:admin/xpack/security/role/bulk_put"); + public static final ActionType BULK_DELETE_ROLES = new ActionType<>("cluster:admin/xpack/security/role/bulk_delete"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkDeleteRolesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkDeleteRolesRequest.java new file mode 100644 index 0000000000000..d7009a683b0e9 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkDeleteRolesRequest.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.core.security.action.role;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.WriteRequest;
+
+import java.util.List;
+import java.util.Objects;
+
+public class BulkDeleteRolesRequest extends ActionRequest {
+
+    private List<String> roleNames;
+
+    private WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE;
+
+    public BulkDeleteRolesRequest(List<String> roleNames) {
+        this.roleNames = roleNames;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        // Role names are validated when the delete is executed, so that an invalid
+        // name yields a partial-success response instead of failing the whole request
+        return null;
+    }
+
+    public List<String> getRoleNames() {
+        return roleNames;
+    }
+
+    public BulkDeleteRolesRequest setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
+        this.refreshPolicy = refreshPolicy;
+        return this;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        BulkDeleteRolesRequest that = (BulkDeleteRolesRequest) o;
+        return Objects.equals(roleNames, that.roleNames);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(roleNames);
+    }
+
+    public WriteRequest.RefreshPolicy getRefreshPolicy() {
+        return refreshPolicy;
+    }
+}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java
index c601bbdd79396..ba199e183d4af 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java
@@ -27,7 +27,7 @@
 /**
  * Builder for requests to bulk add roles to the security index
  */
-public class BulkPutRoleRequestBuilder extends ActionRequestBuilder<BulkPutRolesRequest, BulkPutRolesResponse> {
+public class BulkPutRoleRequestBuilder extends ActionRequestBuilder<BulkPutRolesRequest, BulkRolesResponse> {
     private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowDescription(true).build();
 
     @SuppressWarnings("unchecked")
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkRolesResponse.java
similarity index 94%
rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesResponse.java
rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkRolesResponse.java
index 15870806f25fd..b74cc1fa15a4a 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRolesResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkRolesResponse.java
@@ -21,7 +21,7 @@
 import java.util.Map;
 import java.util.stream.Collectors;
 
-public class BulkPutRolesResponse extends ActionResponse implements ToXContentObject {
+public class BulkRolesResponse extends ActionResponse implements ToXContentObject {
 
     private final List<Item> items;
 
@@ -34,12 +34,12 @@ public Builder addItem(Item item) {
         return this;
     }
 
-    public BulkPutRolesResponse build() {
-        return new BulkPutRolesResponse(items);
+    public BulkRolesResponse build() {
+        return
new BulkRolesResponse(items); } } - public BulkPutRolesResponse(List items) { + public BulkRolesResponse(List items) { this.items = items; } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index a85be132ebca8..ffa4d1082c7e6 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -281,6 +281,7 @@ public class Constants { "cluster:admin/xpack/security/role/query", "cluster:admin/xpack/security/role/put", "cluster:admin/xpack/security/role/bulk_put", + "cluster:admin/xpack/security/role/bulk_delete", "cluster:admin/xpack/security/role_mapping/delete", "cluster:admin/xpack/security/role_mapping/get", "cluster:admin/xpack/security/role_mapping/put", diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index d877ae63d0037..1abb9bbb067dc 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -187,4 +187,18 @@ protected void fetchRoleAndAssertEqualsExpected(final String roleName, final Rol ); assertThat(actual, equalTo(Map.of(expectedRoleDescriptor.getName(), expectedRoleDescriptor))); } + + protected Map upsertRoles(String roleDescriptorsByName) throws IOException { + Request request = rolesRequest(roleDescriptorsByName); + Response response = adminClient().performRequest(request); + assertOK(response); + return responseAsMap(response); + } + + protected Request rolesRequest(String roleDescriptorsByName) { + Request rolesRequest; + rolesRequest = new Request(HttpPost.METHOD_NAME, "/_security/role"); + rolesRequest.setJsonEntity(org.elasticsearch.core.Strings.format(roleDescriptorsByName)); + return rolesRequest; + } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkDeleteRoleRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkDeleteRoleRestIT.java new file mode 100644 index 0000000000000..c0d673694a0e7 --- /dev/null +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkDeleteRoleRestIT.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.role; + +import org.apache.http.client.methods.HttpDelete; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class BulkDeleteRoleRestIT extends SecurityOnTrialLicenseRestTestCase { + @SuppressWarnings("unchecked") + public void testDeleteValidExistingRoles() throws Exception { + Map responseMap = upsertRoles(""" + {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test2": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test3": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["write"]}]}}}"""); + assertThat(responseMap, not(hasKey("errors"))); + + List rolesToDelete = List.of("test1", "test3"); + Map response = deleteRoles(rolesToDelete); + List deleted = (List) response.get("deleted"); + assertThat(deleted, equalTo(rolesToDelete)); + + assertRolesDeleted(rolesToDelete); + assertRolesNotDeleted(List.of("test2")); + } + + @SuppressWarnings("unchecked") + public void testTryDeleteNonExistingRoles() throws Exception { + Map responseMap = upsertRoles(""" + {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}}}"""); + assertThat(responseMap, not(hasKey("errors"))); + + List rolesToDelete = List.of("test1", "test2", "test3"); + + Map response = deleteRoles(rolesToDelete); + List deleted = (List) response.get("deleted"); + + List notFound = (List) response.get("not_found"); + + assertThat(deleted, equalTo(List.of("test1"))); + assertThat(notFound, equalTo(List.of("test2", "test3"))); + + assertRolesDeleted(rolesToDelete); + } + + @SuppressWarnings("unchecked") + public void testTryDeleteReservedRoleName() throws Exception { + Map responseMap = upsertRoles(""" + {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}}}"""); + assertThat(responseMap, not(hasKey("errors"))); + + Map response = deleteRoles(List.of("superuser", "test1")); + + List deleted = (List) response.get("deleted"); + assertThat(deleted, equalTo(List.of("test1"))); + + Map errors = (Map) response.get("errors"); + assertThat((Integer) errors.get("count"), equalTo(1)); + Map errorDetails = (Map) ((Map) errors.get("details")).get("superuser"); + + assertThat( + errorDetails, + equalTo(Map.of("type", "illegal_argument_exception", "reason", "role [superuser] is reserved and cannot be deleted")) + ); + + assertRolesDeleted(List.of("test1")); + assertRolesNotDeleted(List.of("superuser")); + } + + protected Map deleteRoles(List roles) throws IOException { + Request request = new Request(HttpDelete.METHOD_NAME, "/_security/role"); + request.setJsonEntity(Strings.format(""" + {"names": [%s]}""", String.join(",", roles.stream().map(role -> "\"" + role + "\"").toList()))); + + Response response = adminClient().performRequest(request); + assertOK(response); + return responseAsMap(response); + } + + protected void assertRolesDeleted(List roleNames) { + for (String roleName : roleNames) { + ResponseException exception = assertThrows( + ResponseException.class, + () -> adminClient().performRequest(new 
Request("GET", "/_security/role/" + roleName)) + ); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + } + } + + protected void assertRolesNotDeleted(List roleNames) throws IOException { + for (String roleName : roleNames) { + Response response = adminClient().performRequest(new Request("GET", "/_security/role/" + roleName)); + assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + } + } +} diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java index 6e111c8f54552..0297abad7a508 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java @@ -7,14 +7,11 @@ package org.elasticsearch.xpack.security.role; -import org.apache.http.client.methods.HttpPost; import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.SecurityOnTrialLicenseRestTestCase; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -213,19 +210,4 @@ public void testBulkUpdates() throws Exception { assertEquals(3, items.size()); } } - - protected Map upsertRoles(String roleDescriptorsByName) throws IOException { - Request request = rolesRequest(roleDescriptorsByName); - Response response = adminClient().performRequest(request); - assertOK(response); - return responseAsMap(response); - } - - protected Request rolesRequest(String roleDescriptorsByName) { - Request rolesRequest; - rolesRequest = new Request(HttpPost.METHOD_NAME, "/_security/role"); - rolesRequest.setJsonEntity(org.elasticsearch.core.Strings.format(roleDescriptorsByName)); - return rolesRequest; - } - } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index d5099729c52b3..11c688e9ee5eb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -254,6 +254,7 @@ import org.elasticsearch.xpack.security.action.profile.TransportSuggestProfilesAction; import org.elasticsearch.xpack.security.action.profile.TransportUpdateProfileDataAction; import org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction; +import org.elasticsearch.xpack.security.action.role.TransportBulkDeleteRolesAction; import org.elasticsearch.xpack.security.action.role.TransportBulkPutRolesAction; import org.elasticsearch.xpack.security.action.role.TransportClearRolesCacheAction; import org.elasticsearch.xpack.security.action.role.TransportDeleteRoleAction; @@ -373,6 +374,7 @@ import org.elasticsearch.xpack.security.rest.action.profile.RestSuggestProfilesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestUpdateProfileDataAction; import org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; +import org.elasticsearch.xpack.security.rest.action.role.RestBulkDeleteRolesAction; import 
org.elasticsearch.xpack.security.rest.action.role.RestBulkPutRolesAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestDeleteRoleAction; @@ -1540,6 +1542,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(ActionTypes.QUERY_ROLE_ACTION, TransportQueryRoleAction.class), new ActionHandler<>(PutRoleAction.INSTANCE, TransportPutRoleAction.class), new ActionHandler<>(ActionTypes.BULK_PUT_ROLES, TransportBulkPutRolesAction.class), + new ActionHandler<>(ActionTypes.BULK_DELETE_ROLES, TransportBulkDeleteRolesAction.class), new ActionHandler<>(DeleteRoleAction.INSTANCE, TransportDeleteRoleAction.class), new ActionHandler<>(TransportChangePasswordAction.TYPE, TransportChangePasswordAction.class), new ActionHandler<>(AuthenticateAction.INSTANCE, TransportAuthenticateAction.class), @@ -1635,6 +1638,7 @@ public List getRestHandlers( new RestGetRolesAction(settings, getLicenseState()), new RestQueryRoleAction(settings, getLicenseState()), new RestBulkPutRolesAction(settings, getLicenseState(), bulkPutRoleRequestBuilderFactory.get()), + new RestBulkDeleteRolesAction(settings, getLicenseState()), new RestPutRoleAction(settings, getLicenseState(), putRoleRequestBuilderFactory.get()), new RestDeleteRoleAction(settings, getLicenseState()), new RestChangePasswordAction(settings, securityContext.get(), getLicenseState()), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkDeleteRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkDeleteRolesAction.java new file mode 100644 index 0000000000000..1bd9e6e108e45 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkDeleteRolesAction.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.security.action.role; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.role.BulkDeleteRolesRequest; +import org.elasticsearch.xpack.core.security.action.role.BulkRolesResponse; +import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; + +public class TransportBulkDeleteRolesAction extends TransportAction { + + private final NativeRolesStore rolesStore; + + @Inject + public TransportBulkDeleteRolesAction(ActionFilters actionFilters, NativeRolesStore rolesStore, TransportService transportService) { + super(ActionTypes.BULK_DELETE_ROLES.name(), actionFilters, transportService.getTaskManager()); + this.rolesStore = rolesStore; + } + + @Override + protected void doExecute(Task task, BulkDeleteRolesRequest request, ActionListener listener) { + rolesStore.deleteRoles(request.getRoleNames(), request.getRefreshPolicy(), listener); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java index fca354d04c7c5..19972e90bdbbe 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportBulkPutRolesAction.java @@ -14,11 +14,10 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesRequest; -import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse; +import org.elasticsearch.xpack.core.security.action.role.BulkRolesResponse; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; -public class TransportBulkPutRolesAction extends TransportAction { - +public class TransportBulkPutRolesAction extends TransportAction { private final NativeRolesStore rolesStore; @Inject @@ -28,7 +27,7 @@ public TransportBulkPutRolesAction(ActionFilters actionFilters, NativeRolesStore } @Override - protected void doExecute(Task task, final BulkPutRolesRequest request, final ActionListener listener) { + protected void doExecute(Task task, final BulkPutRolesRequest request, final ActionListener listener) { rolesStore.putRoles(request.getRefreshPolicy(), request.getRoles(), listener); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 00714dd3b024f..adeada6cbf6cf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -49,7 +49,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.ScrollHelper; -import 
org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse;
+import org.elasticsearch.xpack.core.security.action.role.BulkRolesResponse;
 import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction;
 import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest;
 import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheResponse;
@@ -310,7 +310,7 @@ public void deleteRole(final DeleteRoleRequest deleteRoleRequest, final ActionLi
             listener.onFailure(frozenSecurityIndex.getUnavailableReason(PRIMARY_SHARDS));
         } else {
             securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
-                DeleteRequest request = client.prepareDelete(SECURITY_MAIN_ALIAS, getIdForRole(deleteRoleRequest.name())).request();
+                DeleteRequest request = createRoleDeleteRequest(deleteRoleRequest.name());
                 request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy());
                 executeAsyncWithOrigin(
                     client.threadPool().getThreadContext(),
@@ -338,6 +338,114 @@ public void onFailure(Exception e) {
         }
     }
 
+    public void deleteRoles(
+        final List<String> roleNames,
+        WriteRequest.RefreshPolicy refreshPolicy,
+        final ActionListener<BulkRolesResponse> listener
+    ) {
+        if (enabled == false) {
+            listener.onFailure(new IllegalStateException("Native role management is disabled"));
+            return;
+        }
+
+        BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(refreshPolicy);
+        Map<String, Exception> validationErrorByRoleName = new HashMap<>();
+
+        for (String roleName : roleNames) {
+            if (reservedRoleNameChecker.isReserved(roleName)) {
+                validationErrorByRoleName.put(
+                    roleName,
+                    new IllegalArgumentException("role [" + roleName + "] is reserved and cannot be deleted")
+                );
+            } else {
+                bulkRequest.add(createRoleDeleteRequest(roleName));
+            }
+        }
+
+        if (bulkRequest.numberOfActions() == 0) {
+            bulkResponseWithOnlyValidationErrors(roleNames, validationErrorByRoleName, listener);
+            return;
+        }
+
+        final SecurityIndexManager frozenSecurityIndex = securityIndex.defensiveCopy();
+        if (frozenSecurityIndex.indexExists() == false) {
+            logger.debug("security index does not exist");
+            listener.onResponse(new BulkRolesResponse(List.of()));
+        } else if (frozenSecurityIndex.isAvailable(PRIMARY_SHARDS) == false) {
+            listener.onFailure(frozenSecurityIndex.getUnavailableReason(PRIMARY_SHARDS));
+        } else {
+            securityIndex.checkIndexVersionThenExecute(
+                listener::onFailure,
+                () -> executeAsyncWithOrigin(
+                    client.threadPool().getThreadContext(),
+                    SECURITY_ORIGIN,
+                    bulkRequest,
+                    new ActionListener<BulkResponse>() {
+                        @Override
+                        public void onResponse(BulkResponse bulkResponse) {
+                            bulkResponseAndRefreshRolesCache(roleNames, bulkResponse, validationErrorByRoleName, listener);
+                        }
+
+                        @Override
+                        public void onFailure(Exception e) {
+                            logger.error(() -> "failed to delete roles", e);
+                            listener.onFailure(e);
+                        }
+                    },
+                    client::bulk
+                )
+            );
+        }
+    }
+
+    private void bulkResponseAndRefreshRolesCache(
+        List<String> roleNames,
+        BulkResponse bulkResponse,
+        Map<String, Exception> validationErrorByRoleName,
+        ActionListener<BulkRolesResponse> listener
+    ) {
+        Iterator<BulkItemResponse> bulkItemResponses = bulkResponse.iterator();
+        BulkRolesResponse.Builder bulkPutRolesResponseBuilder = new BulkRolesResponse.Builder();
+        List<String> rolesToRefreshInCache = new ArrayList<>(roleNames.size());
+        // Bulk item responses come back in request order; roles that failed validation never
+        // made it into the bulk request, so the iterator is only advanced for the others.
+        roleNames.stream().map(roleName -> {
+            if (validationErrorByRoleName.containsKey(roleName)) {
+                return BulkRolesResponse.Item.failure(roleName, validationErrorByRoleName.get(roleName));
+            }
+            BulkItemResponse resp = bulkItemResponses.next();
+            if (resp.isFailed()) {
+                return BulkRolesResponse.Item.failure(roleName,
resp.getFailure().getCause()); + } + if (UPDATE_ROLES_REFRESH_CACHE_RESULTS.contains(resp.getResponse().getResult())) { + rolesToRefreshInCache.add(roleName); + } + return BulkRolesResponse.Item.success(roleName, resp.getResponse().getResult()); + }).forEach(bulkPutRolesResponseBuilder::addItem); + + clearRoleCache(rolesToRefreshInCache.toArray(String[]::new), ActionListener.wrap(res -> { + listener.onResponse(bulkPutRolesResponseBuilder.build()); + }, listener::onFailure), bulkResponse); + } + + private void bulkResponseWithOnlyValidationErrors( + List roleNames, + Map validationErrorByRoleName, + ActionListener listener + ) { + BulkRolesResponse.Builder bulkRolesResponseBuilder = new BulkRolesResponse.Builder(); + roleNames.stream() + .map(roleName -> BulkRolesResponse.Item.failure(roleName, validationErrorByRoleName.get(roleName))) + .forEach(bulkRolesResponseBuilder::addItem); + + listener.onResponse(bulkRolesResponseBuilder.build()); + } + + private void executeAsyncRolesBulkRequest(BulkRequest bulkRequest, ActionListener listener) { + securityIndex.checkIndexVersionThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, bulkRequest, listener, client::bulk) + ); + } + private Exception validateRoleDescriptor(RoleDescriptor role) { ActionRequestValidationException validationException = null; validationException = RoleDescriptorRequestValidator.validate(role, validationException); @@ -423,7 +531,7 @@ public void onFailure(Exception e) { public void putRoles( final WriteRequest.RefreshPolicy refreshPolicy, final List roles, - final ActionListener listener + final ActionListener listener ) { if (enabled == false) { listener.onFailure(new IllegalStateException("Native role management is disabled")); @@ -454,14 +562,10 @@ public void putRoles( List roleNames = roles.stream().map(RoleDescriptor::getName).toList(); if (bulkRequest.numberOfActions() == 0) { - BulkPutRolesResponse.Builder bulkPutRolesResponseBuilder = new BulkPutRolesResponse.Builder(); - roleNames.stream() - .map(roleName -> BulkPutRolesResponse.Item.failure(roleName, validationErrorByRoleName.get(roleName))) - .forEach(bulkPutRolesResponseBuilder::addItem); - - listener.onResponse(bulkPutRolesResponseBuilder.build()); + bulkResponseWithOnlyValidationErrors(roleNames, validationErrorByRoleName, listener); return; } + securityIndex.prepareIndexIfNeededThenExecute( listener::onFailure, () -> executeAsyncWithOrigin( @@ -471,28 +575,7 @@ public void putRoles( new ActionListener() { @Override public void onResponse(BulkResponse bulkResponse) { - List rolesToRefreshInCache = new ArrayList<>(roleNames.size()); - - Iterator bulkItemResponses = bulkResponse.iterator(); - BulkPutRolesResponse.Builder bulkPutRolesResponseBuilder = new BulkPutRolesResponse.Builder(); - - roleNames.stream().map(roleName -> { - if (validationErrorByRoleName.containsKey(roleName)) { - return BulkPutRolesResponse.Item.failure(roleName, validationErrorByRoleName.get(roleName)); - } - BulkItemResponse resp = bulkItemResponses.next(); - if (resp.isFailed()) { - return BulkPutRolesResponse.Item.failure(roleName, resp.getFailure().getCause()); - } - if (UPDATE_ROLES_REFRESH_CACHE_RESULTS.contains(resp.getResponse().getResult())) { - rolesToRefreshInCache.add(roleName); - } - return BulkPutRolesResponse.Item.success(roleName, resp.getResponse().getResult()); - }).forEach(bulkPutRolesResponseBuilder::addItem); - - clearRoleCache(rolesToRefreshInCache.toArray(String[]::new), ActionListener.wrap(res 
-> { - listener.onResponse(bulkPutRolesResponseBuilder.build()); - }, listener::onFailure), bulkResponse); + bulkResponseAndRefreshRolesCache(roleNames, bulkResponse, validationErrorByRoleName, listener); } @Override @@ -520,6 +603,10 @@ private UpdateRequest createRoleUpsertRequest(final RoleDescriptor role) throws .request(); } + private DeleteRequest createRoleDeleteRequest(final String roleName) { + return client.prepareDelete(SECURITY_MAIN_ALIAS, getIdForRole(roleName)).request(); + } + private XContentBuilder createRoleXContentBuilder(RoleDescriptor role) throws IOException { assert NativeRealmValidationUtil.validateRoleName(role.getName(), false) == null : "Role name was invalid or reserved: " + role.getName(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java new file mode 100644 index 0000000000000..683faf5cfa914 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/role/RestBulkDeleteRolesAction.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.security.rest.action.role; + +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.role.BulkDeleteRolesRequest; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Rest endpoint to bulk delete roles to the security index + */ +public class RestBulkDeleteRolesAction extends NativeRoleBaseRestHandler { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "bulk_delete_roles_request", + a -> new BulkDeleteRolesRequest((List) a[0]) + ); + + static { + PARSER.declareStringArray(constructorArg(), new ParseField("names")); + } + + public RestBulkDeleteRolesAction(Settings settings, XPackLicenseState licenseState) { + super(settings, licenseState); + } + + @Override + public List routes() { + return List.of(Route.builder(DELETE, "/_security/role").build()); + } + + @Override + public String getName() { + return "security_bulk_delete_roles_action"; + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + BulkDeleteRolesRequest bulkDeleteRolesRequest = PARSER.parse(request.contentParser(), null); + if (request.param("refresh") != null) { + bulkDeleteRolesRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.parse(request.param("refresh"))); + } + return channel -> client.execute(ActionTypes.BULK_DELETE_ROLES, bulkDeleteRolesRequest, new 
RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index e22883d80cb8d..a4ee449438fe0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.delete.DeleteRequestBuilder; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.PlainActionFuture; @@ -57,7 +58,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesResponse; +import org.elasticsearch.xpack.core.security.action.role.BulkRolesResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; @@ -123,6 +124,7 @@ public void beforeNativeRoleStoreTests() { when(client.threadPool()).thenReturn(threadPool); when(client.prepareIndex(SECURITY_MAIN_ALIAS)).thenReturn(new IndexRequestBuilder(client)); when(client.prepareUpdate(any(), any())).thenReturn(new UpdateRequestBuilder(client)); + when(client.prepareDelete(any(), any())).thenReturn(new DeleteRequestBuilder(client, SECURITY_MAIN_ALIAS)); } @After @@ -162,7 +164,7 @@ private void putRole(NativeRolesStore rolesStore, RoleDescriptor roleDescriptor, rolesStore.putRole(WriteRequest.RefreshPolicy.IMMEDIATE, roleDescriptor, actionListener); } else { rolesStore.putRoles(WriteRequest.RefreshPolicy.IMMEDIATE, List.of(roleDescriptor), ActionListener.wrap(resp -> { - BulkPutRolesResponse.Item item = resp.getItems().get(0); + BulkRolesResponse.Item item = resp.getItems().get(0); if (item.getResultType().equals("created")) { actionListener.onResponse(true); } else { @@ -765,13 +767,46 @@ public void testManyValidRoles() throws IOException { ) .toList(); - AtomicReference response = new AtomicReference<>(); + AtomicReference response = new AtomicReference<>(); AtomicReference exception = new AtomicReference<>(); rolesStore.putRoles(WriteRequest.RefreshPolicy.IMMEDIATE, roleDescriptors, ActionListener.wrap(response::set, exception::set)); assertNull(exception.get()); verify(client, times(1)).bulk(any(BulkRequest.class), any()); } + public void testBulkDeleteRoles() { + final NativeRolesStore rolesStore = createRoleStoreForTest(); + + AtomicReference response = new AtomicReference<>(); + AtomicReference exception = new AtomicReference<>(); + rolesStore.deleteRoles( + List.of("test-role-1", "test-role-2", "test-role-3"), + WriteRequest.RefreshPolicy.IMMEDIATE, + ActionListener.wrap(response::set, exception::set) + ); + assertNull(exception.get()); + verify(client, times(1)).bulk(any(BulkRequest.class), any()); + } + + public void testBulkDeleteReservedRole() { + final NativeRolesStore rolesStore = createRoleStoreForTest(); + + AtomicReference response = new 
AtomicReference<>(); + AtomicReference exception = new AtomicReference<>(); + rolesStore.deleteRoles( + List.of("superuser"), + WriteRequest.RefreshPolicy.IMMEDIATE, + ActionListener.wrap(response::set, exception::set) + ); + assertNull(exception.get()); + assertThat(response.get().getItems().size(), equalTo(1)); + BulkRolesResponse.Item item = response.get().getItems().get(0); + assertThat(item.getCause().getMessage(), equalTo("role [superuser] is reserved and cannot be deleted")); + assertThat(item.getRoleName(), equalTo("superuser")); + + verify(client, times(0)).bulk(any(BulkRequest.class), any()); + } + private ClusterService mockClusterServiceWithMinNodeVersion(TransportVersion transportVersion) { final ClusterService clusterService = mock(ClusterService.class, Mockito.RETURNS_DEEP_STUBS); when(clusterService.state().getMinTransportVersion()).thenReturn(transportVersion); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml index 72a240ab92695..e608e9e14972d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml @@ -21,16 +21,8 @@ teardown: security.delete_user: username: "joe" ignore: 404 - - do: - security.delete_role: - name: "admin_role" - ignore: 404 - - do: - security.delete_role: - name: "role_with_description" - ignore: 404 --- -"Test bulk put roles api": +"Test bulk put and delete roles api": - do: security.bulk_put_role: body: > @@ -81,3 +73,12 @@ teardown: name: "role_with_description" - match: { role_with_description.cluster.0: "manage_security" } - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." 
} + + - do: + security.bulk_delete_role: + body: > + { + "names": ["admin_role", "role_with_description"] + } + - match: { deleted.0: "admin_role" } + - match: { deleted.1: "role_with_description" } From fcaef5915e654b6f7780baac261676833d9c1442 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Wed, 3 Jul 2024 19:15:46 +1000 Subject: [PATCH 146/216] Don't detect PlainActionFuture deadlock on concurrent complete (#110361) Closes #110360 Closes #110181 --- docs/changelog/110361.yaml | 7 ++++ .../action/support/PlainActionFuture.java | 6 +++- .../support/PlainActionFutureTests.java | 36 +++++++++++++++++++ 3 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/110361.yaml diff --git a/docs/changelog/110361.yaml b/docs/changelog/110361.yaml new file mode 100644 index 0000000000000..8558c88e06049 --- /dev/null +++ b/docs/changelog/110361.yaml @@ -0,0 +1,7 @@ +pr: 110361 +summary: Don't detect `PlainActionFuture` deadlock on concurrent complete +area: Distributed +type: bug +issues: + - 110181 + - 110360 diff --git a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java index c52c9ba1264db..06b5fa4ffd0e8 100644 --- a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java @@ -379,7 +379,11 @@ private boolean complete(@Nullable V v, @Nullable Exception e, int finalState) { } else if (getState() == COMPLETING) { // If some other thread is currently completing the future, block until // they are done so we can guarantee completion. - acquireShared(-1); + // Don't use acquire here, to prevent false-positive deadlock detection + // when multiple threads from the same pool are completing the future + while (isDone() == false) { + Thread.onSpinWait(); + } } return doCompletion; } diff --git a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java index 2ca914eb23c61..aa9456eaaa2e9 100644 --- a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java @@ -14,6 +14,8 @@ import org.elasticsearch.core.Assertions; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteTransportException; import java.util.concurrent.CancellationException; @@ -21,6 +23,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; public class PlainActionFutureTests extends ESTestCase { @@ -142,6 +145,39 @@ public void testCancelException() { assertPropagatesInterrupt(() -> future.actionGet(10, TimeUnit.SECONDS)); } + public void testAssertCompleteAllowedAllowsConcurrentCompletesFromSamePool() { + final AtomicReference> futureReference = new AtomicReference<>(new PlainActionFuture<>()); + final var executorName = randomFrom(ThreadPool.Names.GENERIC, ThreadPool.Names.MANAGEMENT); + final var running = new AtomicBoolean(true); + try (TestThreadPool threadPool = new TestThreadPool(getTestName())) { + // We only need 4 threads to reproduce this issue reliably, using more threads + // 
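Worth noting in the `deleteRoles` flow above: reserved-role failures are collected up front and never enter the bulk request, so the bulk response items only cover the non-reserved names, and `bulkResponseAndRefreshRolesCache` walks the original name order to stitch the two result sources back together. A minimal, self-contained sketch of that alignment pattern (the `Item` record and result strings here are hypothetical placeholders, not the real `BulkRolesResponse` API):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

// Hypothetical stand-in for BulkRolesResponse.Item, just to show the idea in isolation.
record Item(String roleName, String result, Exception cause) {}

public class MergeResultsSketch {
    // Walk the original name order; names that failed validation never made it into
    // the bulk request, so the bulk iterator is advanced only for the remaining names.
    static List<Item> merge(List<String> names, Map<String, Exception> validationErrors, Iterator<String> bulkResults) {
        List<Item> items = new ArrayList<>();
        for (String name : names) {
            if (validationErrors.containsKey(name)) {
                items.add(new Item(name, "failed", validationErrors.get(name)));
            } else {
                items.add(new Item(name, bulkResults.next(), null));
            }
        }
        return items;
    }

    public static void main(String[] args) {
        var errors = Map.<String, Exception>of("superuser", new IllegalArgumentException("reserved"));
        var bulk = List.of("deleted", "not_found").iterator(); // one entry per non-reserved name
        System.out.println(merge(List.of("superuser", "role-a", "role-b"), errors, bulk));
    }
}

The ordering guarantee is what makes the single shared iterator safe; if bulk items could come back reordered, the merge would have to key them by document id instead.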
From fcaef5915e654b6f7780baac261676833d9c1442 Mon Sep 17 00:00:00 2001
From: Nick Tindall
Date: Wed, 3 Jul 2024 19:15:46 +1000
Subject: [PATCH 146/216] Don't detect PlainActionFuture deadlock on concurrent complete (#110361)

Closes #110360
Closes #110181
---
 docs/changelog/110361.yaml                      |  7 ++++
 .../action/support/PlainActionFuture.java       |  6 +++-
 .../support/PlainActionFutureTests.java         | 36 +++++++++++++++++++
 3 files changed, 48 insertions(+), 1 deletion(-)
 create mode 100644 docs/changelog/110361.yaml

diff --git a/docs/changelog/110361.yaml b/docs/changelog/110361.yaml
new file mode 100644
index 0000000000000..8558c88e06049
--- /dev/null
+++ b/docs/changelog/110361.yaml
@@ -0,0 +1,7 @@
+pr: 110361
+summary: Don't detect `PlainActionFuture` deadlock on concurrent complete
+area: Distributed
+type: bug
+issues:
+ - 110181
+ - 110360
diff --git a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java
index c52c9ba1264db..06b5fa4ffd0e8 100644
--- a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java
+++ b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java
@@ -379,7 +379,11 @@ private boolean complete(@Nullable V v, @Nullable Exception e, int finalState) {
         } else if (getState() == COMPLETING) {
             // If some other thread is currently completing the future, block until
             // they are done so we can guarantee completion.
-            acquireShared(-1);
+            // Don't use acquire here, to prevent false-positive deadlock detection
+            // when multiple threads from the same pool are completing the future
+            while (isDone() == false) {
+                Thread.onSpinWait();
+            }
         }
         return doCompletion;
     }
diff --git a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java
index 2ca914eb23c61..aa9456eaaa2e9 100644
--- a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java
@@ -14,6 +14,8 @@
 import org.elasticsearch.core.Assertions;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.RemoteTransportException;
 
 import java.util.concurrent.CancellationException;
@@ -21,6 +23,7 @@
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
 
 public class PlainActionFutureTests extends ESTestCase {
 
@@ -142,6 +145,39 @@ public void testCancelException() {
         assertPropagatesInterrupt(() -> future.actionGet(10, TimeUnit.SECONDS));
     }
 
+    public void testAssertCompleteAllowedAllowsConcurrentCompletesFromSamePool() {
+        final AtomicReference<PlainActionFuture<Void>> futureReference = new AtomicReference<>(new PlainActionFuture<>());
+        final var executorName = randomFrom(ThreadPool.Names.GENERIC, ThreadPool.Names.MANAGEMENT);
+        final var running = new AtomicBoolean(true);
+        try (TestThreadPool threadPool = new TestThreadPool(getTestName())) {
+            // We only need 4 threads to reproduce this issue reliably, using more threads
+            // just increases the run time due to the additional synchronisation
+            final var threadCount = Math.min(threadPool.info(executorName).getMax(), 4);
+            final var startBarrier = new CyclicBarrier(threadCount + 1);
+            // N threads competing to complete the futures
+            for (int i = 0; i < threadCount; i++) {
+                threadPool.executor(executorName).execute(() -> {
+                    safeAwait(startBarrier);
+                    while (running.get()) {
+                        futureReference.get().onResponse(null);
+                    }
+                });
+            }
+            // The race can only occur once per completion, so we provide
+            // a stream of new futures to the competing threads to
+            // maximise the probability it occurs. Providing them
+            // with new futures while they spin proved to be much
+            // more reliable at reproducing the issue than releasing
+            // them all from a barrier to complete a single future.
+            safeAwait(startBarrier);
+            for (int i = 0; i < 20; i++) {
+                futureReference.set(new PlainActionFuture<>());
+                safeSleep(1);
+            }
+            running.set(false);
+        }
+    }
+
     private static void assertCancellation(ThrowingRunnable runnable) {
         final var cancellationException = expectThrows(CancellationException.class, runnable);
         assertEquals("Task was cancelled.", cancellationException.getMessage());
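The `Thread.onSpinWait()` loop above works because a thread only reaches it once another thread is already past the point of no return in completing the future, so the wait is short and bounded, and spinning avoids the lock acquisition that the deadlock detector was misreading as pool starvation. A stripped-down sketch of the same busy-wait handoff, assuming a simple atomic flag rather than the real future internals:

import java.util.concurrent.atomic.AtomicBoolean;

public class SpinWaitSketch {
    private final AtomicBoolean done = new AtomicBoolean();

    void complete() {
        done.set(true);
    }

    // Waits for a completion that is already in progress on another thread.
    // Spinning (rather than blocking on a lock) keeps this wait invisible to
    // any lock-based deadlock or starvation detection.
    void awaitCompletingThread() {
        while (done.get() == false) {
            Thread.onSpinWait(); // hint to the runtime/CPU that this is a busy wait
        }
    }

    public static void main(String[] args) throws InterruptedException {
        var sketch = new SpinWaitSketch();
        Thread completer = new Thread(sketch::complete);
        completer.start();
        sketch.awaitCompletingThread();
        completer.join();
        System.out.println("completed");
    }
}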
From 40e9c2537414d9c9b6d7f503cacd20cde0b016ff Mon Sep 17 00:00:00 2001
From: Craig Taverner
Date: Wed, 3 Jul 2024 11:24:28 +0200
Subject: [PATCH 147/216] Fix missing cases for ST_DISTANCE Lucene pushdown (#110391)

* Fix missing cases for ST_DISTANCE Lucene pushdown

The feature to push down ST_DISTANCE to Lucene was not working when
combined with OR and NOT clauses, or more deeply nested. This fixes
that by traversing the predicate tree recursively.

* Update docs/changelog/110391.yaml

* Fixed changelog
---
 docs/changelog/110391.yaml                      |  6 ++
 .../src/main/resources/spatial.csv-spec        | 54 +++++++++++
 .../optimizer/LocalPhysicalPlanOptimizer.java  | 63 ++++++------
 .../optimizer/PhysicalPlanOptimizerTests.java  | 95 +++++++++++++++++++
 4 files changed, 187 insertions(+), 31 deletions(-)
 create mode 100644 docs/changelog/110391.yaml

diff --git a/docs/changelog/110391.yaml b/docs/changelog/110391.yaml
new file mode 100644
index 0000000000000..1e00eda970398
--- /dev/null
+++ b/docs/changelog/110391.yaml
@@ -0,0 +1,6 @@
+pr: 110391
+summary: Fix ST_DISTANCE Lucene push-down for complex predicates
+area: ES|QL
+type: bug
+issues:
+ - 110349
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec
index 018a22db1337a..02067e9dbe490 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec
@@ -989,6 +989,60 @@ ARN | Arlanda | POINT(17.9307299016916 59.6511203397372) |
 SVG | Stavanger Sola | POINT (5.6298103297218 58.8821564842185) | Norway | Sandnes | POINT (5.7361 58.8517) | 548.26 | 541.35
 ;
 
+airportsWithComplexDistancePredicateFromCopenhagenTrainStation
+required_capability: st_distance
+
+FROM airports
+| WHERE (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 600000
+    AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000)
+  OR
+       (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) < 300000
+    AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) > 200000)
+| EVAL distance = ROUND(ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)"))/1000,2)
+| EVAL city_distance = ROUND(ST_DISTANCE(city_location, TO_GEOPOINT("POINT(12.565 55.673)"))/1000,2)
+| KEEP abbrev, name, location, country, city, city_location, distance, city_distance
+| SORT distance ASC
+;
+
+abbrev:k | name:text | location:geo_point | country:k | city:k | city_location:geo_point | distance:d | city_distance:d
+GOT | Gothenburg | POINT(12.2938269092573 57.6857493534879) | Sweden | Gothenburg | POINT(11.9675 57.7075) | 224.42 | 229.15
+HAM | Hamburg | POINT(10.005647830925 53.6320011640866) | Germany | Norderstedt | POINT(10.0103 53.7064) | 280.34 | 273.42
+GDN | Gdansk Lech Walesa | POINT(18.4684422165911 54.3807025352925) | Poland | Gdańsk | POINT(18.6453 54.3475) | 402.61 | 414.59
+NYO | Stockholm-Skavsta | POINT(16.9216055584254 58.7851041303448) | Sweden | Nyköping | POINT(17.0086 58.7531) | 433.99 | 434.43
+OSL | Oslo Gardermoen | POINT(11.0991032762581 60.1935783171386) | Norway | Oslo | POINT(10.7389 59.9133) | 510.03 | 483.71
+DRS | Dresden | POINT(13.7649671440047 51.1250912428871) | Germany | Dresden | POINT(13.74 51.05) | 511.9 | 519.91
+BMA | Bromma | POINT(17.9456175406145 59.3555902065112) | Sweden | Stockholm | POINT(18.0686 59.3294) | 520.18 | 522.54
+PLQ | Palanga Int'l | POINT(21.0974463986251 55.9713426235358) | Lithuania | Klaipėda | POINT(21.1667 55.75) | 533.67 | 538.56
+ARN | Arlanda | POINT(17.9307299016916 59.6511203397372) | Sweden | Stockholm | POINT(18.0686 59.3294) | 545.09 | 522.54
+SVG | Stavanger Sola | POINT (5.6298103297218 58.8821564842185) | Norway | Sandnes | POINT (5.7361 58.8517) | 548.26 | 541.35
+;
+
+airportsWithVeryComplexDistancePredicateFromCopenhagenTrainStation
+required_capability: st_distance
+
+FROM airports
+| WHERE ((ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 600000
+    AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000
+    AND NOT (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) < 500000
+    AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) > 430000))
+  OR
+       (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) < 300000
+    AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) > 200000))
+  AND NOT abbrev == "PLQ"
+  AND scalerank < 6
+| EVAL distance = ROUND(ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)"))/1000,2)
+| EVAL city_distance = ROUND(ST_DISTANCE(city_location, TO_GEOPOINT("POINT(12.565 55.673)"))/1000,2)
+| KEEP abbrev, scalerank, name, location, country, city, city_location, distance, city_distance
+| SORT distance ASC
+;
+
+abbrev:k | scalerank:i | name:text | location:geo_point | country:k | city:k | city_location:geo_point | distance:d | city_distance:d
+HAM | 3 | Hamburg | POINT(10.005647830925 53.6320011640866) | Germany | Norderstedt | POINT(10.0103 53.7064) | 280.34 | 273.42
+OSL | 2 | Oslo Gardermoen | POINT(11.0991032762581 60.1935783171386) | Norway | Oslo | POINT(10.7389 59.9133) | 510.03 | 483.71
+BMA | 5 | Bromma | POINT(17.9456175406145 59.3555902065112) | Sweden | Stockholm | POINT(18.0686 59.3294) | 520.18 | 522.54
+ARN | 2 | Arlanda | POINT(17.9307299016916 59.6511203397372) | Sweden | Stockholm | POINT(18.0686 59.3294) | 545.09 | 522.54
+;
+
 airportsWithinDistanceCopenhagenTrainStationCount
 required_capability: st_distance
 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
index d88b46cbbc530..9447e018bc142 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
@@ -31,6 +31,7 @@
 import org.elasticsearch.xpack.esql.core.expression.TypedAttribute;
 import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates;
+import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull;
@@ -599,46 +600,43 @@ public static class EnableSpatialDistancePushdown extends PhysicalOptimizerRules
         protected PhysicalPlan rule(FilterExec filterExec, LocalPhysicalOptimizerContext ctx) {
             PhysicalPlan plan = filterExec;
             if (filterExec.child() instanceof EsQueryExec) {
-                List<Expression> rewritten = new ArrayList<>();
-                List<Expression> notRewritten = new ArrayList<>();
-                for (Expression exp : splitAnd(filterExec.condition())) {
-                    boolean didRewrite = false;
-                    if (exp instanceof EsqlBinaryComparison comparison) {
-                        ComparisonType comparisonType = ComparisonType.from(comparison.getFunctionType());
-                        if (comparison.left() instanceof StDistance dist && comparison.right().foldable()) {
-                            didRewrite = rewriteComparison(rewritten, dist, comparison.right(), comparisonType);
-                        } else if (comparison.right() instanceof StDistance dist && comparison.left().foldable()) {
-                            didRewrite = rewriteComparison(rewritten, dist, comparison.left(), ComparisonType.invert(comparisonType));
-                        }
-                    }
-                    if (didRewrite == false) {
-                        notRewritten.add(exp);
+                // Find and rewrite any binary comparisons that involve a distance function and a literal
+                var rewritten = filterExec.condition().transformDown(EsqlBinaryComparison.class, comparison -> {
+                    ComparisonType comparisonType = ComparisonType.from(comparison.getFunctionType());
+                    if (comparison.left() instanceof StDistance dist && comparison.right().foldable()) {
+                        return rewriteComparison(comparison, dist, comparison.right(), comparisonType);
+                    } else if (comparison.right() instanceof StDistance dist && comparison.left().foldable()) {
+                        return rewriteComparison(comparison, dist, comparison.left(), ComparisonType.invert(comparisonType));
                     }
-                }
-                if (rewritten.isEmpty() == false) {
-                    rewritten.addAll(notRewritten);
-                    plan = new FilterExec(filterExec.source(), filterExec.child(), Predicates.combineAnd(rewritten));
+                    return comparison;
+                });
+                if (rewritten.equals(filterExec.condition()) == false) {
+                    plan = new FilterExec(filterExec.source(), filterExec.child(), rewritten);
                 }
             }
             return plan;
         }
 
-        private boolean rewriteComparison(List<Expression> rewritten, StDistance dist, Expression literal, ComparisonType comparisonType) {
+        private Expression rewriteComparison(
+            EsqlBinaryComparison comparison,
+            StDistance dist,
+            Expression literal,
+            ComparisonType comparisonType
+        ) {
             Object value = literal.fold();
             if (value instanceof Number number) {
                 if (dist.right().foldable()) {
-                    return rewriteDistanceFilter(rewritten, dist.source(), dist.left(), dist.right(), number, comparisonType);
+                    return rewriteDistanceFilter(comparison, dist.left(), dist.right(), number, comparisonType);
                 } else if (dist.left().foldable()) {
-                    return rewriteDistanceFilter(rewritten, dist.source(), dist.right(), dist.left(), number, comparisonType);
+                    return rewriteDistanceFilter(comparison, dist.right(), dist.left(), number, comparisonType);
                 }
             }
-            return false;
+            return comparison;
         }
 
-        private boolean rewriteDistanceFilter(
-            List<Expression> rewritten,
-            Source source,
+        private Expression rewriteDistanceFilter(
+            EsqlBinaryComparison comparison,
             Expression spatialExp,
             Expression literalExp,
             Number number,
@@ -647,19 +645,22 @@ private boolean rewriteDistanceFilter(
             Geometry geometry = SpatialRelatesUtils.makeGeometryFromLiteral(literalExp);
             if (geometry instanceof Point point) {
                 double distance = number.doubleValue();
+                Source source = comparison.source();
                 if (comparisonType.lt) {
                     distance = comparisonType.eq ? distance : Math.nextDown(distance);
-                    rewritten.add(new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp)));
+                    return new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp));
                 } else if (comparisonType.gt) {
                     distance = comparisonType.eq ? distance : Math.nextUp(distance);
-                    rewritten.add(new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, distance, literalExp)));
+                    return new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, distance, literalExp));
                 } else if (comparisonType.eq) {
-                    rewritten.add(new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp)));
-                    rewritten.add(new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, Math.nextDown(distance), literalExp)));
+                    return new And(
+                        source,
+                        new SpatialIntersects(source, spatialExp, makeCircleLiteral(point, distance, literalExp)),
+                        new SpatialDisjoint(source, spatialExp, makeCircleLiteral(point, Math.nextDown(distance), literalExp))
+                    );
                 }
-                return true;
             }
-            return false;
+            return comparison;
         }
 
         private Literal makeCircleLiteral(Point point, double distance, Expression literalExpression) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
index 210c4d1be6225..96f401ba894a5 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
@@ -45,6 +45,7 @@
 import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
+import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
 import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison;
 import org.elasticsearch.xpack.esql.core.index.EsIndex;
 import org.elasticsearch.xpack.esql.core.index.IndexResolution;
@@ -3575,6 +3576,100 @@ AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000
         assertShapeQueryRange(shapeQueryBuilders, 400000.0, 600000.0);
     }
 
+    public void testPushSpatialDistanceDisjointBandsToSource() {
+        var query = """
+            FROM airports
+            | WHERE (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 600000
+                 AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000)
+              OR
+                    (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 300000
+                 AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 200000)
+            """;
+        var plan = this.physicalPlan(query, airports);
+        var limit = as(plan, LimitExec.class);
+        var exchange = as(limit.child(), ExchangeExec.class);
+        var fragment = as(exchange.child(), FragmentExec.class);
+        var limit2 = as(fragment.fragment(), Limit.class);
+        var filter = as(limit2.child(), Filter.class);
+        var or = as(filter.condition(), Or.class);
+        assertThat("OR has two predicates", or.arguments().size(), equalTo(2));
+        for (Expression expression : or.arguments()) {
+            var and = as(expression, And.class);
+            for (Expression exp : and.arguments()) {
+                var comp = as(exp, EsqlBinaryComparison.class);
+                var expectedComp = comp.equals(and.left()) ? LessThanOrEqual.class : GreaterThanOrEqual.class;
+                assertThat("filter contains expected binary comparison", comp, instanceOf(expectedComp));
+                assertThat("filter contains ST_DISTANCE", comp.left(), instanceOf(StDistance.class));
+            }
+        }
+
+        var optimized = optimizedPlan(plan);
+        var topLimit = as(optimized, LimitExec.class);
+        exchange = as(topLimit.child(), ExchangeExec.class);
+        var project = as(exchange.child(), ProjectExec.class);
+        var fieldExtract = as(project.child(), FieldExtractExec.class);
+        var source = source(fieldExtract.child());
+        var bool = as(source.query(), BoolQueryBuilder.class);
+        var disjuntiveQueryBuilders = bool.should().stream().filter(p -> p instanceof BoolQueryBuilder).toList();
+        assertThat("Expected two disjunctive query builders", disjuntiveQueryBuilders.size(), equalTo(2));
+        for (int i = 0; i < disjuntiveQueryBuilders.size(); i++) {
+            var subRangeBool = as(disjuntiveQueryBuilders.get(i), BoolQueryBuilder.class);
+            var shapeQueryBuilders = subRangeBool.must().stream().filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder).toList();
+            assertShapeQueryRange(shapeQueryBuilders, i == 0 ? 400000.0 : 200000.0, i == 0 ? 600000.0 : 300000.0);
+        }
+    }
+
+    public void testPushSpatialDistanceComplexPredicateToSource() {
+        var query = """
+            FROM airports
+            | WHERE ((ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 600000
+                  AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 400000
+                  AND NOT (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 500000
+                       AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 430000))
+               OR (ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) <= 300000
+                  AND ST_DISTANCE(location, TO_GEOPOINT("POINT(12.565 55.673)")) >= 200000))
+              AND NOT abbrev == "PLQ"
+              AND scalerank < 6
+            """;
+        var plan = this.physicalPlan(query, airports);
+        var limit = as(plan, LimitExec.class);
+        var exchange = as(limit.child(), ExchangeExec.class);
+        var fragment = as(exchange.child(), FragmentExec.class);
+        var limit2 = as(fragment.fragment(), Limit.class);
+        var filter = as(limit2.child(), Filter.class);
+        var outerAnd = as(filter.condition(), And.class);
+        var outerLeft = as(outerAnd.left(), And.class);
+        as(outerLeft.right(), Not.class);
+        as(outerAnd.right(), LessThan.class);
+        var or = as(outerLeft.left(), Or.class);
+        var innerAnd1 = as(or.left(), And.class);
+        var innerAnd2 = as(or.right(), And.class);
+        for (Expression exp : innerAnd2.arguments()) {
+            var comp = as(exp, EsqlBinaryComparison.class);
+            var expectedComp = comp.equals(innerAnd2.left()) ? LessThanOrEqual.class : GreaterThanOrEqual.class;
+            assertThat("filter contains expected binary comparison", comp, instanceOf(expectedComp));
+            assertThat("filter contains ST_DISTANCE", comp.left(), instanceOf(StDistance.class));
+        }
+
+        var optimized = optimizedPlan(plan);
+        var topLimit = as(optimized, LimitExec.class);
+        exchange = as(topLimit.child(), ExchangeExec.class);
+        var project = as(exchange.child(), ProjectExec.class);
+        var fieldExtract = as(project.child(), FieldExtractExec.class);
+        var source = source(fieldExtract.child());
+        var bool = as(source.query(), BoolQueryBuilder.class);
+        assertThat("Expected boolean query of three MUST clauses", bool.must().size(), equalTo(2));
+        assertThat("Expected boolean query of one FILTER clause", bool.filter().size(), equalTo(1));
+        var boolDisjuntive = as(bool.filter().get(0), BoolQueryBuilder.class);
+        var disjuntiveQueryBuilders = boolDisjuntive.should().stream().filter(p -> p instanceof BoolQueryBuilder).toList();
+        assertThat("Expected two disjunctive query builders", disjuntiveQueryBuilders.size(), equalTo(2));
+        for (int i = 0; i < disjuntiveQueryBuilders.size(); i++) {
+            var subRangeBool = as(disjuntiveQueryBuilders.get(i), BoolQueryBuilder.class);
+            var shapeQueryBuilders = subRangeBool.must().stream().filter(p -> p instanceof SpatialRelatesQuery.ShapeQueryBuilder).toList();
+            assertShapeQueryRange(shapeQueryBuilders, i == 0 ? 400000.0 : 200000.0, i == 0 ? 600000.0 : 300000.0);
+        }
+    }
+
     private void assertShapeQueryRange(List<QueryBuilder> shapeQueryBuilders, double min, double max) {
         assertThat("Expected two shape query builders", shapeQueryBuilders.size(), equalTo(2));
         var relationStats = new HashMap<>();
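The heart of the fix above is swapping a flat split-on-AND pass for `transformDown`, which visits comparisons wherever they sit in the predicate tree, including under OR and NOT. A toy sketch of that recursive rewrite, using a hypothetical sealed `Pred` tree and Java 21 pattern switches rather than the actual ES|QL `Expression` classes:

import java.util.function.UnaryOperator;

// Hypothetical predicate tree, standing in for ES|QL's Expression nodes.
sealed interface Pred permits And, Or, Not, Leaf {}
record And(Pred left, Pred right) implements Pred {}
record Or(Pred left, Pred right) implements Pred {}
record Not(Pred inner) implements Pred {}
record Leaf(String text) implements Pred {}

public class TransformDownSketch {
    // Apply the rule at each node, then recurse into the (possibly rewritten)
    // children -- so leaves are rewritten regardless of AND/OR/NOT nesting,
    // which is exactly what the flat "split on AND" approach was missing.
    static Pred transformDown(Pred p, UnaryOperator<Pred> rule) {
        Pred r = rule.apply(p);
        return switch (r) {
            case And a -> new And(transformDown(a.left(), rule), transformDown(a.right(), rule));
            case Or o -> new Or(transformDown(o.left(), rule), transformDown(o.right(), rule));
            case Not n -> new Not(transformDown(n.inner(), rule));
            case Leaf l -> l;
        };
    }

    public static void main(String[] args) {
        Pred tree = new Or(
            new Not(new Leaf("dist <= 300km")),
            new And(new Leaf("dist >= 400km"), new Leaf("scalerank < 6"))
        );
        // Rewrite only distance leaves, leaving everything else untouched.
        Pred rewritten = transformDown(
            tree,
            p -> (p instanceof Leaf l && l.text().startsWith("dist")) ? new Leaf("spatial(" + l.text() + ")") : p
        );
        System.out.println(rewritten);
    }
}

Returning the original node unchanged when no rewrite applies (as `rewriteComparison` does with `return comparison`) is what lets the caller detect "nothing happened" with a simple equality check on the whole condition.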
From d83d2e81f8e2fd2eb0692ad5b13f518d693c4945 Mon Sep 17 00:00:00 2001
From: Simon Cooper
Date: Wed, 3 Jul 2024 10:52:55 +0100
Subject: [PATCH 148/216] Add tests for FileSettingsService.handleSnapshotRestore (#110376)

Also streamline FileSettingsService tests a bit
---
 .../service/FileSettingsServiceTests.java | 163 +++++++++++-------
 1 file changed, 99 insertions(+), 64 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java
index cc5f0e22ad4ee..01c3e37a9ae77 100644
--- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java
@@ -14,11 +14,14 @@
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.NodeConnectionsService;
+import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.metadata.ReservedStateMetadata;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.routing.RerouteService;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.component.Lifecycle;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.TimeValue;
@@ -38,8 +41,8 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
-import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -47,6 +50,8 @@
 import java.util.function.Consumer;
 
 import static org.elasticsearch.node.Node.NODE_NAME_SETTING;
+import static org.hamcrest.Matchers.anEmptyMap;
+import static org.hamcrest.Matchers.hasEntry;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
@@ -57,9 +62,9 @@ public class FileSettingsServiceTests extends ESTestCase {
 
     private Environment env;
     private ClusterService clusterService;
-    private FileSettingsService fileSettingsService;
     private ReservedClusterStateService controller;
     private ThreadPool threadpool;
+    private FileSettingsService fileSettingsService;
 
     @Before
     public void setUp() throws Exception {
@@ -67,20 +72,17 @@ public void setUp() throws Exception {
 
         threadpool = new TestThreadPool("file_settings_service_tests");
 
-        clusterService = spy(
-            new ClusterService(
-                Settings.builder().put(NODE_NAME_SETTING.getKey(), "test").build(),
-                new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
-                threadpool,
-                new TaskManager(Settings.EMPTY, threadpool, Set.of())
-            )
+        clusterService = new ClusterService(
+            Settings.builder().put(NODE_NAME_SETTING.getKey(), "test").build(),
+            new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
+            threadpool,
+            new TaskManager(Settings.EMPTY, threadpool, Set.of())
         );
 
-        final DiscoveryNode localNode = DiscoveryNodeUtils.create("node");
-        final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
+        DiscoveryNode localNode = DiscoveryNodeUtils.create("node");
+        ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
             .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId()))
             .build();
-        doAnswer((Answer<ClusterState>) invocation -> clusterState).when(clusterService).state();
         clusterService.setNodeConnectionsService(mock(NodeConnectionsService.class));
         clusterService.getClusterApplierService().setInitialState(clusterState);
@@ -99,16 +101,25 @@ public void setUp() throws Exception {
 
         ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
 
-        controller = new ReservedClusterStateService(
-            clusterService,
-            mock(RerouteService.class),
-            List.of(new ReservedClusterSettingsAction(clusterSettings))
+        controller = spy(
+            new ReservedClusterStateService(
+                clusterService,
+                mock(RerouteService.class),
+                List.of(new ReservedClusterSettingsAction(clusterSettings))
+            )
         );
 
        fileSettingsService = spy(new FileSettingsService(clusterService, controller, env));
     }
 
     @After
     public void tearDown() throws Exception {
+        if (fileSettingsService.lifecycleState() == Lifecycle.State.STARTED) {
+            fileSettingsService.stop();
+        }
+        if (fileSettingsService.lifecycleState() == Lifecycle.State.STOPPED) {
+            fileSettingsService.close();
+        }
+
         super.tearDown();
         clusterService.close();
         threadpool.shutdownNow();
@@ -121,7 +132,6 @@ public void testStartStop() {
         assertTrue(fileSettingsService.watching());
         fileSettingsService.stop();
         assertFalse(fileSettingsService.watching());
-        fileSettingsService.close();
     }
 
     public void testOperatorDirName() {
@@ -136,85 +146,66 @@ public void testOperatorDirName() {
 
     @SuppressWarnings("unchecked")
     public void testInitialFileError() throws Exception {
-        ReservedClusterStateService stateService = mock(ReservedClusterStateService.class);
-
         doAnswer((Answer<Void>) invocation -> {
             ((Consumer<Exception>) invocation.getArgument(2)).accept(new IllegalStateException("Some exception"));
             return null;
-        }).when(stateService).process(any(), any(XContentParser.class), any());
+        }).when(controller).process(any(), any(XContentParser.class), any());
 
         AtomicBoolean settingsChanged = new AtomicBoolean(false);
         CountDownLatch latch = new CountDownLatch(1);
 
-        final FileSettingsService service = spy(new FileSettingsService(clusterService, stateService, env));
-
-        service.addFileChangedListener(() -> settingsChanged.set(true));
+        fileSettingsService.addFileChangedListener(() -> settingsChanged.set(true));
 
-        doAnswer((Answer<Void>) invocation -> {
+        doAnswer((Answer<?>) invocation -> {
             try {
-                invocation.callRealMethod();
+                return invocation.callRealMethod();
             } finally {
                 latch.countDown();
             }
-            return null;
-        }).when(service).processFileChanges();
+        }).when(fileSettingsService).processFileChanges();
 
-        Files.createDirectories(service.watchedFileDir());
+        Files.createDirectories(fileSettingsService.watchedFileDir());
         // contents of the JSON don't matter, we just need a file to exist
-        writeTestFile(service.watchedFile(), "{}");
+        writeTestFile(fileSettingsService.watchedFile(), "{}");
 
-        service.start();
-        service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE));
+        fileSettingsService.start();
+        fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE));
 
         // wait until the watcher thread has started, and it has discovered the file
         assertTrue(latch.await(20, TimeUnit.SECONDS));
 
-        verify(service, times(1)).processFileChanges();
+        verify(fileSettingsService, times(1)).processFileChanges();
         // assert we never notified any listeners of successful application of file based settings
         assertFalse(settingsChanged.get());
-
-        service.stop();
-        service.close();
     }
 
     @SuppressWarnings("unchecked")
     public void testInitialFileWorks() throws Exception {
-        ReservedClusterStateService stateService = mock(ReservedClusterStateService.class);
-
         // Let's check that if we didn't throw an error that everything works
         doAnswer((Answer<Void>) invocation -> {
             ((Consumer<Exception>) invocation.getArgument(2)).accept(null);
             return null;
-        }).when(stateService).process(any(), any(XContentParser.class), any());
+        }).when(controller).process(any(), any(XContentParser.class), any());
 
         CountDownLatch latch = new CountDownLatch(1);
 
-        final FileSettingsService service = spy(new FileSettingsService(clusterService, stateService, env));
-
-        service.addFileChangedListener(latch::countDown);
+        fileSettingsService.addFileChangedListener(latch::countDown);
 
-        Files.createDirectories(service.watchedFileDir());
+        Files.createDirectories(fileSettingsService.watchedFileDir());
         // contents of the JSON don't matter, we just need a file to exist
-        writeTestFile(service.watchedFile(), "{}");
+        writeTestFile(fileSettingsService.watchedFile(), "{}");
 
-        service.start();
-        service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE));
+        fileSettingsService.start();
+        fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE));
 
         // wait for listener to be called
         assertTrue(latch.await(20, TimeUnit.SECONDS));
 
-        verify(service, times(1)).processFileChanges();
-
-        service.stop();
-        service.close();
+        verify(fileSettingsService, times(1)).processFileChanges();
     }
 
     @SuppressWarnings("unchecked")
     public void testStopWorksInMiddleOfProcessing() throws Exception {
-        var spiedController = spy(controller);
-        var fsService = new FileSettingsService(clusterService, spiedController, env);
-
-        FileSettingsService service = spy(fsService);
-
         CountDownLatch processFileLatch = new CountDownLatch(1);
         CountDownLatch deadThreadLatch = new CountDownLatch(1);
 
@@ -229,36 +220,80 @@ public void testStopWorksInMiddleOfProcessing() throws Exception {
                     throw new RuntimeException(e);
                 }
             }).start();
-            return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT));
-        }).when(spiedController).parse(any(String.class), any());
+            return new ReservedStateChunk(Map.of(), new ReservedStateVersion(1L, Version.CURRENT));
+        }).when(controller).parse(any(String.class), any());
 
         doAnswer((Answer<Void>) invocation -> {
             var completionListener = invocation.getArgument(1, ActionListener.class);
             completionListener.onResponse(null);
             return null;
-        }).when(spiedController).initEmpty(any(String.class), any());
+        }).when(controller).initEmpty(any(String.class), any());
 
-        service.start();
-        service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE));
-        assertTrue(service.watching());
+        fileSettingsService.start();
+        fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE));
+        assertTrue(fileSettingsService.watching());
 
-        Files.createDirectories(service.watchedFileDir());
+        Files.createDirectories(fileSettingsService.watchedFileDir());
 
         // Make some fake settings file to cause the file settings service to process it
-        writeTestFile(service.watchedFile(), "{}");
+        writeTestFile(fileSettingsService.watchedFile(), "{}");
 
         // we need to wait a bit, on MacOS it may take up to 10 seconds for the Java watcher service to notice the file,
         // on Linux is instantaneous. Windows is instantaneous too.
         assertTrue(processFileLatch.await(30, TimeUnit.SECONDS));
 
         // Stopping the service should interrupt the watcher thread, we should be able to stop
-        service.stop();
-        assertFalse(service.watching());
-        service.close();
+        fileSettingsService.stop();
+        assertFalse(fileSettingsService.watching());
+        fileSettingsService.close();
         // let the deadlocked thread end, so we can cleanly exit the test
         deadThreadLatch.countDown();
     }
 
+    public void testHandleSnapshotRestoreClearsMetadata() throws Exception {
+        ClusterState state = ClusterState.builder(clusterService.state())
+            .metadata(
+                Metadata.builder(clusterService.state().metadata())
+                    .put(new ReservedStateMetadata(FileSettingsService.NAMESPACE, 1L, Map.of(), null))
+                    .build()
+            )
+            .build();
+
+        Metadata.Builder metadata = Metadata.builder(state.metadata());
+        fileSettingsService.handleSnapshotRestore(state, metadata);
+
+        assertThat(metadata.build().reservedStateMetadata(), anEmptyMap());
+    }
+
+    public void testHandleSnapshotRestoreResetsMetadata() throws Exception {
+        fileSettingsService.start();
+        fileSettingsService.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE));
+
+        Files.createDirectories(fileSettingsService.watchedFileDir());
+        // contents of the JSON don't matter, we just need a file to exist
+        writeTestFile(fileSettingsService.watchedFile(), "{}");
+        assertTrue(fileSettingsService.watching());
+
+        ClusterState state = ClusterState.builder(clusterService.state())
+            .metadata(
+                Metadata.builder(clusterService.state().metadata())
+                    .put(new ReservedStateMetadata(FileSettingsService.NAMESPACE, 1L, Map.of(), null))
+                    .build()
+            )
+            .build();
+
+        Metadata.Builder metadata = Metadata.builder();
+        fileSettingsService.handleSnapshotRestore(state, metadata);
+
+        assertThat(
+            metadata.build().reservedStateMetadata(),
+            hasEntry(
+                FileSettingsService.NAMESPACE,
+                new ReservedStateMetadata(FileSettingsService.NAMESPACE, ReservedStateMetadata.RESTORED_VERSION, Map.of(), null)
+            )
+        );
+    }
+
     // helpers
     private void writeTestFile(Path path, String contents) throws IOException {
         Path tempFilePath = createTempFile();
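The two new `handleSnapshotRestore` tests pin down a pair of behaviours: with no operator settings file on disk the restored reserved-state entry is dropped, while an existing file causes the entry's version to be reset to a sentinel so the watcher reprocesses the file. A simplified model of that decision, with a hypothetical map of namespace versions standing in for the real cluster metadata (the sentinel value here is illustrative; the actual constant is `ReservedStateMetadata.RESTORED_VERSION`):

import java.util.HashMap;
import java.util.Map;

public class RestoreMetadataSketch {
    static final long RESTORED_VERSION = 0L; // illustrative sentinel only

    // Decide what happens to the file-settings reserved state after a snapshot restore.
    static Map<String, Long> handleSnapshotRestore(Map<String, Long> reservedVersions, boolean watchedFileExists) {
        Map<String, Long> result = new HashMap<>(reservedVersions);
        if (watchedFileExists) {
            result.put("file_settings", RESTORED_VERSION); // force reprocessing of the local file
        } else {
            result.remove("file_settings"); // nothing on disk, drop the restored reserved state
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(handleSnapshotRestore(Map.of("file_settings", 1L), false)); // {}
        System.out.println(handleSnapshotRestore(Map.of("file_settings", 1L), true));  // {file_settings=0}
    }
}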
From b9394f737946ab8059576f2b4b7c9e6809d3e266 Mon Sep 17 00:00:00 2001
From: Panagiotis Bailis
Date: Wed, 3 Jul 2024 13:23:58 +0300
Subject: [PATCH 149/216] Adding trace logging for SearchProgressActionListenerIT (#110378)

---
 .../action/search/SearchProgressActionListenerIT.java       | 5 +++++
 .../java/org/elasticsearch/action/search/SearchPhase.java   | 1 +
 .../elasticsearch/action/search/SearchTransportService.java | 5 +++++
 .../main/java/org/elasticsearch/search/SearchService.java   | 2 ++
 4 files changed, 13 insertions(+)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchProgressActionListenerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchProgressActionListenerIT.java
index 227a3b8612331..e5dca62a97494 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchProgressActionListenerIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchProgressActionListenerIT.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.junit.annotations.TestIssueLogging;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -38,6 +39,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.lessThan;
 
+@TestIssueLogging(
+    issueUrl = "https://github.com/elastic/elasticsearch/issues/109830",
+    value = "org.elasticsearch.action.search:TRACE," + "org.elasticsearch.search.SearchService:TRACE"
+)
 public class SearchProgressActionListenerIT extends ESSingleNodeTestCase {
 
     private List<SearchShard> shards;
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
index 5ed449667fe57..7ad81154691c0 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java
@@ -84,6 +84,7 @@ protected void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResul
             && context.getRequest().scroll() == null
             && (context.isPartOfPointInTime(phaseResult.getContextId()) == false)) {
             try {
+                context.getLogger().trace("trying to release search context [{}]", phaseResult.getContextId());
                 SearchShardTarget shardTarget = phaseResult.getSearchShardTarget();
                 Transport.Connection connection = context.getConnection(shardTarget.getClusterAlias(), shardTarget.getNodeId());
                 context.sendReleaseSearchContext(
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
index 399a4ad526537..9713d804ddc13 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java
@@ -8,6 +8,8 @@
 
 package org.elasticsearch.action.search;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionListenerResponseHandler;
 import org.elasticsearch.action.IndicesRequest;
@@ -108,6 +110,8 @@ public class SearchTransportService {
      */
     public static final String QUERY_CAN_MATCH_NODE_NAME = "indices:data/read/search[can_match][n]";
 
+    private static final Logger logger = LogManager.getLogger(SearchTransportService.class);
+
     private final TransportService transportService;
     private final NodeClient client;
     private final BiFunction<
@@ -442,6 +446,7 @@ public static void registerRequestHandler(
         SearchTransportAPMMetrics searchTransportMetrics
     ) {
         final TransportRequestHandler freeContextHandler = (request, channel, task) -> {
+            logger.trace("releasing search context [{}]", request.id());
             boolean freed = searchService.freeReaderContext(request.id());
             channel.sendResponse(new SearchFreeContextResponse(freed));
         };
diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java
index b45a2e2e2ca14..0c9d5ee51a9f0 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchService.java
@@ -475,6 +475,7 @@ protected void putReaderContext(ReaderContext context) {
     }
 
     protected ReaderContext removeReaderContext(long id) {
+        logger.trace("removing reader context [{}]", id);
         return activeReaders.remove(id);
     }
 
@@ -1175,6 +1176,7 @@ private void freeAllContextsForShard(ShardId shardId) {
     }
 
     public boolean freeReaderContext(ShardSearchContextId contextId) {
+        logger.trace("freeing reader context [{}]", contextId);
         if (sessionId.equals(contextId.getSessionId())) {
             try (ReaderContext context = removeReaderContext(contextId.getId())) {
                 return context != null;

From a7917a9395d67ebc3445f69de2f0c14867477dd7 Mon Sep 17 00:00:00 2001
From: Rene Groeschke
Date: Wed, 3 Jul 2024 12:28:54 +0200
Subject: [PATCH 150/216] Include ent search yaml rest specs in rest resources zip (#110411)

This should allow supporting the ent-search api in Elasticsearch
specification validation
---
 x-pack/plugin/ent-search/qa/rest/build.gradle | 4 ++++
 x-pack/rest-resources-zip/build.gradle        | 1 +
 2 files changed, 5 insertions(+)

diff --git a/x-pack/plugin/ent-search/qa/rest/build.gradle b/x-pack/plugin/ent-search/qa/rest/build.gradle
index e47bcf82f0f8c..5b04a326f142c 100644
--- a/x-pack/plugin/ent-search/qa/rest/build.gradle
+++ b/x-pack/plugin/ent-search/qa/rest/build.gradle
@@ -33,3 +33,7 @@ testClusters.configureEach {
   user username: 'entsearch-user', password: 'entsearch-user-password', role: 'user'
   user username: 'entsearch-unprivileged', password: 'entsearch-unprivileged-password', role: 'unprivileged'
 }
+
+artifacts {
+  restXpackTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test"))
+}
diff --git a/x-pack/rest-resources-zip/build.gradle b/x-pack/rest-resources-zip/build.gradle
index 2ac8bd65ddc36..3d0533b4ec57e 100644
--- a/x-pack/rest-resources-zip/build.gradle
+++ b/x-pack/rest-resources-zip/build.gradle
@@ -25,6 +25,7 @@ dependencies {
   freeCompatTests project(path: ':rest-api-spec', configuration: 'restCompatTests')
   platinumTests project(path: ':x-pack:plugin', configuration: 'restXpackTests')
   platinumTests project(path: ':x-pack:plugin:eql:qa:rest', configuration: 'restXpackTests')
+  platinumTests project(path: ':x-pack:plugin:ent-search:qa:rest', configuration: 'restXpackTests')
   platinumCompatTests project(path: ':x-pack:plugin', configuration: 'restCompatTests')
   platinumCompatTests project(path: ':x-pack:plugin:eql:qa:rest', configuration: 'restCompatTests')
 }

From c3c8b6ddc779e3ff9a09f398b31b00f838428b46 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com>
Date: Wed, 3 Jul 2024 13:05:35 +0200
Subject: [PATCH 151/216] AwaitsFix: https://github.com/elastic/elasticsearch/issues/110416

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index e58a553f6fa8d..c328701a6adf7 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -116,6 +116,9 @@ tests:
 - class: "org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests"
   issue: "https://github.com/elastic/elasticsearch/issues/110408"
   method: "testCreateAndRestorePartialSearchableSnapshot"
+- class: "org.elasticsearch.xpack.security.role.RoleWithDescriptionRestIT"
+  issue: "https://github.com/elastic/elasticsearch/issues/110416"
+  method: "testCreateOrUpdateRoleWithDescription"
 
 # Examples:
 #

From 2d148e2215c81ead29f836b7e838846c8106914e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com>
Date: Wed, 3 Jul 2024 13:06:05 +0200
Subject: [PATCH 152/216] AwaitsFix: https://github.com/elastic/elasticsearch/issues/110417

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index c328701a6adf7..bf4640fff53c8 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -119,6 +119,9 @@ tests:
 - class: "org.elasticsearch.xpack.security.role.RoleWithDescriptionRestIT"
   issue: "https://github.com/elastic/elasticsearch/issues/110416"
   method: "testCreateOrUpdateRoleWithDescription"
+- class: "org.elasticsearch.xpack.security.role.RoleWithDescriptionRestIT"
+  issue: "https://github.com/elastic/elasticsearch/issues/110417"
+  method: "testCreateOrUpdateRoleWithDescription"
 
 # Examples:
 #

From 406b969c62a739b01caca0c6d6a505e01fde0617 Mon Sep 17 00:00:00 2001
From: Tim Grein
Date: Wed, 3 Jul 2024 14:03:12 +0200
Subject: [PATCH 153/216] [Inference API] Add Google Vertex AI reranking docs (#110390)

---
 .../inference/service-google-vertex-ai.asciidoc | 26 ++++++++++++++++++-
 1 file changed, 25 insertions(+), 1 deletion(-)

diff --git a/docs/reference/inference/service-google-vertex-ai.asciidoc b/docs/reference/inference/service-google-vertex-ai.asciidoc
index 1e7e2b185a296..640553ab74626 100644
--- a/docs/reference/inference/service-google-vertex-ai.asciidoc
+++ b/docs/reference/inference/service-google-vertex-ai.asciidoc
@@ -25,6 +25,7 @@ include::inference-shared.asciidoc[tag=task-type]
 --
 Available task types:
 
+* `rerank`
 * `text_embedding`.
 --
@@ -79,12 +80,19 @@ More information about the rate limits for Google Vertex AI can be found in the
 (Optional, object)
 include::inference-shared.asciidoc[tag=task-settings]
 +
+.`task_settings` for the `rerank` task type
+[%collapsible%closed]
+=====
+`top_n`:::
+(optional, boolean)
+Specifies the number of the top n documents, which should be returned.
+=====
++
 .`task_settings` for the `text_embedding` task type
 [%collapsible%closed]
 =====
 `auto_truncate`:::
 (optional, boolean)
-For `googlevertexai` service only.
 Specifies if the API truncates inputs longer than the maximum token length automatically.
 =====
 
@@ -109,3 +117,19 @@ PUT _inference/text_embedding/google_vertex_ai_embeddings
 }
 ------------------------------------------------------------
 // TEST[skip:TBD]
+
+The next example shows how to create an {infer} endpoint called
+`google_vertex_ai_rerank` to perform a `rerank` task type.
+
+[source,console]
+------------------------------------------------------------
+PUT _inference/rerank/google_vertex_ai_rerank
+{
+    "service": "googlevertexai",
+    "service_settings": {
+        "service_account_json": "<service_account_json>",
+        "project_id": "<project_id>"
+    }
+}
+------------------------------------------------------------
+// TEST[skip:TBD]

From 822b187af48f9a5560ad365743998315038dad85 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 3 Jul 2024 08:20:06 -0400
Subject: [PATCH 154/216] ESQL: Drop unused Node subclasses (#110419)

And merge NodeSubclassTests into EsqlNodeSubclassTests.
---
 .../core/expression/function/Functions.java        |  24 -
 .../esql/core/plan/logical/Aggregate.java          |  79 --
 .../esql/core/plan/logical/EsRelation.java         | 113 ---
 .../xpack/esql/core/plan/logical/Project.java      |  83 --
 .../core/plan/logical/UnresolvedRelation.java      | 108 ---
 .../xpack/esql/core/tree/Node.java                 |   2 +-
 .../esql/core/tree/NodeSubclassTests.java          | 708 +-----------------
 .../xpack/esql/core/TestUtils.java                 |   6 -
 x-pack/plugin/esql/build.gradle                    |   4 -
 .../resources/forbidden/ql-signatures.txt          |   5 -
 .../esql/tree/EsqlNodeSubclassTests.java           | 698 ++++++++++++++++-
 11 files changed, 672 insertions(+), 1158 deletions(-)
 delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Functions.java
 delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Aggregate.java
 delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/EsRelation.java
 delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Project.java
 delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/UnresolvedRelation.java
 delete mode 100644 x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt

diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Functions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Functions.java
deleted file mode 100644
index 46f9d8399503d..0000000000000
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Functions.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-package org.elasticsearch.xpack.esql.core.expression.function;
-
-import org.elasticsearch.xpack.esql.core.expression.Expression;
-
-/**
- * @deprecated for removal
- */
-@Deprecated
-public abstract class Functions {
-
-    /**
-     * @deprecated for removal
-     */
-    @Deprecated
-    public static boolean isAggregate(Expression e) {
-        throw new IllegalStateException("Should never reach this code");
-    }
-}
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Aggregate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Aggregate.java
deleted file mode 100644
index 3fcfd61e21b45..0000000000000
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Aggregate.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-package org.elasticsearch.xpack.esql.core.plan.logical;
-
-import org.elasticsearch.xpack.esql.core.capabilities.Resolvables;
-import org.elasticsearch.xpack.esql.core.expression.Attribute;
-import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.Expressions;
-import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
-import org.elasticsearch.xpack.esql.core.tree.Source;
-
-import java.util.List;
-import java.util.Objects;
-
-public class Aggregate extends UnaryPlan {
-
-    private final List<Expression> groupings;
-    private final List<? extends NamedExpression> aggregates;
-
-    public Aggregate(Source source, LogicalPlan child, List<Expression> groupings, List<? extends NamedExpression> aggregates) {
-        super(source, child);
-        this.groupings = groupings;
-        this.aggregates = aggregates;
-    }
-
-    @Override
-    protected NodeInfo<Aggregate> info() {
-        return NodeInfo.create(this, Aggregate::new, child(), groupings, aggregates);
-    }
-
-    @Override
-    public Aggregate replaceChild(LogicalPlan newChild) {
-        return new Aggregate(source(), newChild, groupings, aggregates);
-    }
-
-    public List<Expression> groupings() {
-        return groupings;
-    }
-
-    public List<? extends NamedExpression> aggregates() {
-        return aggregates;
-    }
-
-    @Override
-    public boolean expressionsResolved() {
-        return Resolvables.resolved(groupings) && Resolvables.resolved(aggregates);
-    }
-
-    @Override
-    public List<Attribute> output() {
-        return Expressions.asAttributes(aggregates);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(groupings, aggregates, child());
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        Aggregate other = (Aggregate) obj;
-        return Objects.equals(groupings, other.groupings)
-            && Objects.equals(aggregates, other.aggregates)
-            && Objects.equals(child(), other.child());
-    }
-}
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/EsRelation.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/EsRelation.java
deleted file mode 100644
index 2998988837253..0000000000000
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/EsRelation.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */ -package org.elasticsearch.xpack.esql.core.plan.logical; - -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.index.EsIndex; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.NodeUtils; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.EsField; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Objects; - -public class EsRelation extends LeafPlan { - - private final EsIndex index; - private final List attrs; - private final boolean frozen; - - public EsRelation(Source source, EsIndex index, boolean frozen) { - this(source, index, flatten(source, index.mapping()), frozen); - } - - public EsRelation(Source source, EsIndex index, List attributes) { - this(source, index, attributes, false); - } - - public EsRelation(Source source, EsIndex index, List attributes, boolean frozen) { - super(source); - this.index = index; - this.attrs = attributes; - this.frozen = frozen; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, EsRelation::new, index, attrs, frozen); - } - - private static List flatten(Source source, Map mapping) { - return flatten(source, mapping, null); - } - - private static List flatten(Source source, Map mapping, FieldAttribute parent) { - List list = new ArrayList<>(); - - for (Entry entry : mapping.entrySet()) { - String name = entry.getKey(); - EsField t = entry.getValue(); - - if (t != null) { - FieldAttribute f = new FieldAttribute(source, parent, parent != null ? parent.name() + "." + name : name, t); - list.add(f); - // object or nested - if (t.getProperties().isEmpty() == false) { - list.addAll(flatten(source, t.getProperties(), f)); - } - } - } - return list; - } - - public EsIndex index() { - return index; - } - - public boolean frozen() { - return frozen; - } - - @Override - public List output() { - return attrs; - } - - @Override - public boolean expressionsResolved() { - return true; - } - - @Override - public int hashCode() { - return Objects.hash(index, frozen); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - EsRelation other = (EsRelation) obj; - return Objects.equals(index, other.index) && frozen == other.frozen; - } - - @Override - public String nodeString() { - return nodeName() + "[" + index + "]" + NodeUtils.limitedToString(attrs); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Project.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Project.java deleted file mode 100644 index b9070f546d8de..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Project.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.plan.logical; - -import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.expression.function.Functions; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.List; -import java.util.Objects; - -/** - * A {@code Project} is a {@code Plan} with one child. In {@code SELECT x FROM y}, the "SELECT" statement is a Project. - */ -public class Project extends UnaryPlan { - - private final List projections; - - public Project(Source source, LogicalPlan child, List projections) { - super(source, child); - this.projections = projections; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, Project::new, child(), projections); - } - - @Override - public Project replaceChild(LogicalPlan newChild) { - return new Project(source(), newChild, projections); - } - - public List projections() { - return projections; - } - - public Project withProjections(List projections) { - return new Project(source(), child(), projections); - } - - @Override - public boolean resolved() { - return super.resolved() && Expressions.anyMatch(projections, Functions::isAggregate) == false; - } - - @Override - public boolean expressionsResolved() { - return Resolvables.resolved(projections); - } - - @Override - public List output() { - return Expressions.asAttributes(projections); - } - - @Override - public int hashCode() { - return Objects.hash(projections, child()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - Project other = (Project) obj; - - return Objects.equals(projections, other.projections) && Objects.equals(child(), other.child()); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/UnresolvedRelation.java deleted file mode 100644 index d969ad02a4eac..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/UnresolvedRelation.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.plan.logical; - -import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static java.util.Collections.singletonList; - -public class UnresolvedRelation extends LeafPlan implements Unresolvable { - - private final TableIdentifier table; - private final boolean frozen; - private final String alias; - private final String unresolvedMsg; - - public UnresolvedRelation(Source source, TableIdentifier table, String alias, boolean frozen) { - this(source, table, alias, frozen, null); - } - - public UnresolvedRelation(Source source, TableIdentifier table, String alias, boolean frozen, String unresolvedMessage) { - super(source); - this.table = table; - this.alias = alias; - this.frozen = frozen; - this.unresolvedMsg = unresolvedMessage == null ? "Unknown index [" + table.index() + "]" : unresolvedMessage; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, UnresolvedRelation::new, table, alias, frozen, unresolvedMsg); - } - - public TableIdentifier table() { - return table; - } - - public String alias() { - return alias; - } - - public boolean frozen() { - return frozen; - } - - @Override - public boolean resolved() { - return false; - } - - @Override - public boolean expressionsResolved() { - return false; - } - - @Override - public List output() { - return Collections.emptyList(); - } - - @Override - public String unresolvedMessage() { - return unresolvedMsg; - } - - @Override - public int hashCode() { - return Objects.hash(source(), table, alias, unresolvedMsg); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - UnresolvedRelation other = (UnresolvedRelation) obj; - return Objects.equals(table, other.table) - && Objects.equals(alias, other.alias) - && Objects.equals(frozen, other.frozen) - && Objects.equals(unresolvedMsg, other.unresolvedMsg); - } - - @Override - public List nodeProperties() { - return singletonList(table); - } - - @Override - public String toString() { - return UNRESOLVED_PREFIX + table.index(); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java index f42d454ef00bd..b1fc7d59c784d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Node.java @@ -254,7 +254,7 @@ public T transformPropertiesUp(Class typeToken, Function T transformNodeProps(Class typeToken, Function rule) { return info().transform(rule, typeToken); diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java index fae5e349712df..d4065810dabc3 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/tree/NodeSubclassTests.java @@ 
-6,713 +6,21 @@ */ package org.elasticsearch.xpack.esql.core.tree; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttributeTests; -import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; -import org.elasticsearch.xpack.esql.core.tree.NodeTests.ChildrenAreAProperty; -import org.elasticsearch.xpack.esql.core.tree.NodeTests.Dummy; -import org.elasticsearch.xpack.esql.core.tree.NodeTests.NoChildren; -import org.mockito.exceptions.base.MockitoException; - -import java.io.IOException; -import java.lang.reflect.Constructor; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.lang.reflect.WildcardType; -import java.nio.file.FileVisitResult; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; -import java.time.ZoneId; -import java.util.ArrayList; -import java.util.Collection; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Predicate; -import java.util.function.Supplier; -import java.util.jar.JarEntry; -import java.util.jar.JarInputStream; -import static java.util.Arrays.asList; -import static java.util.Collections.emptyList; -import static org.mockito.Mockito.mock; +import java.util.function.Function; /** - * Looks for all subclasses of {@link Node} and verifies that they - * implement {@link Node#info()} and - * {@link Node#replaceChildren(List)} sanely. It'd be better if - * each subclass had its own test case that verified those methods - * and any other interesting things that that they do but we're a - * long way from that and this gets the job done for now. - *

<p>
- * This test attempts to use reflection to create believeable nodes - * and manipulate them in believeable ways with as little knowledge - * of the actual subclasses as possible. This is problematic because - * it is possible, for example, for nodes to stackoverflow because - * they can contain themselves. So this class - * does have some {@link Node}-subclass-specific - * knowledge. As little as I could get away with though. - * <p>
- * When there are actual tests for a subclass of {@linkplain Node} - * then this class will do two things: - * <ul>
- * <li>Skip running any tests for that subclass entirely.
- * <li>Delegate to that test to build nodes of that type when a - * node of that type is called for.
- * </ul>
    + * Shim to expose protected methods to ESQL proper's NodeSubclassTests. */ -public class NodeSubclassTests> extends ESTestCase { - - private static final List> CLASSES_WITH_MIN_TWO_CHILDREN = asList(In.class); - - private final Class subclass; - - public NodeSubclassTests(Class subclass) { - this.subclass = subclass; - } - - public void testInfoParameters() throws Exception { - Constructor ctor = longestCtor(subclass); - Object[] nodeCtorArgs = ctorArgs(ctor); - T node = ctor.newInstance(nodeCtorArgs); - /* - * The count should be the same size as the longest constructor - * by convention. If it isn't then we're missing something. - */ - int expectedCount = ctor.getParameterCount(); - /* - * Except the first `Location` argument of the ctor is implicit - * in the parameters and not included. - */ - expectedCount -= 1; - assertEquals(expectedCount, node.info().properties().size()); - } - - /** - * Test {@link Node#transformPropertiesOnly(Class, java.util.function.Function)} - * implementation on {@link #subclass} which tests the implementation of - * {@link Node#info()}. And tests the actual {@link NodeInfo} subclass - * implementations in the process. - */ - public void testTransform() throws Exception { - Constructor ctor = longestCtor(subclass); - Object[] nodeCtorArgs = ctorArgs(ctor); - T node = ctor.newInstance(nodeCtorArgs); - - Type[] argTypes = ctor.getGenericParameterTypes(); - // start at 1 because we can't change Location. - for (int changedArgOffset = 1; changedArgOffset < ctor.getParameterCount(); changedArgOffset++) { - Object originalArgValue = nodeCtorArgs[changedArgOffset]; - - Type changedArgType = argTypes[changedArgOffset]; - Object changedArgValue = randomValueOtherThanMaxTries( - nodeCtorArgs[changedArgOffset], - () -> makeArg(changedArgType), - // JoinType has only 1 permitted enum element. Limit the number of retries. - 3 - ); - - B transformed = node.transformNodeProps(Object.class, prop -> Objects.equals(prop, originalArgValue) ? changedArgValue : prop); - - if (node.children().contains(originalArgValue) || node.children().equals(originalArgValue)) { - if (node.children().equals(emptyList()) && originalArgValue.equals(emptyList())) { - /* - * If the children are an empty list and the value - * we want to change is an empty list they'll be - * equal to one another so they'll come on this branch. - * This case is rare and hard to reason about so we're - * just going to assert nothing here and hope to catch - * it when we write non-reflection hack tests. - */ - continue; - } - // Transformation shouldn't apply to children. - assertSame(node, transformed); - } else { - assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, changedArgValue); - } - } - } - - /** - * Test {@link Node#replaceChildren(List)} implementation on {@link #subclass}. - */ - public void testReplaceChildren() throws Exception { - Constructor ctor = longestCtor(subclass); - Object[] nodeCtorArgs = ctorArgs(ctor); - T node = ctor.newInstance(nodeCtorArgs); - - Type[] argTypes = ctor.getGenericParameterTypes(); - // start at 1 because we can't change Location. 
- for (int changedArgOffset = 1; changedArgOffset < ctor.getParameterCount(); changedArgOffset++) { - Object originalArgValue = nodeCtorArgs[changedArgOffset]; - Type changedArgType = argTypes[changedArgOffset]; - - if (originalArgValue instanceof Collection col) { - - if (col.isEmpty() || col instanceof EnumSet) { - /* - * We skip empty lists here because they'll spuriously - * pass the conditions below if statements even if they don't - * have anything to do with children. This might cause us to - * ignore the case where a parameter gets copied into the - * children and just happens to be empty but I don't really - * know another way. - */ - - continue; - } - - if (col instanceof List originalList && node.children().equals(originalList)) { - // The arg we're looking at *is* the children - @SuppressWarnings("unchecked") // we pass a reasonable type so get reasonable results - List newChildren = (List) makeListOfSameSizeOtherThan(changedArgType, originalList); - B transformed = node.replaceChildren(newChildren); - assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, newChildren); - } else if (false == col.isEmpty() && node.children().containsAll(col)) { - // The arg we're looking at is a collection contained within the children - List originalList = (List) originalArgValue; - - // First make the new children - @SuppressWarnings("unchecked") // we pass a reasonable type so get reasonable results - List newCollection = (List) makeListOfSameSizeOtherThan(changedArgType, originalList); - - // Now merge that list of children into the original list of children - List originalChildren = node.children(); - List newChildren = new ArrayList<>(originalChildren.size()); - int originalOffset = 0; - for (int i = 0; i < originalChildren.size(); i++) { - if (originalOffset < originalList.size() && originalChildren.get(i).equals(originalList.get(originalOffset))) { - newChildren.add(newCollection.get(originalOffset)); - originalOffset++; - } else { - newChildren.add(originalChildren.get(i)); - } - } - - // Finally! We can assert..... - B transformed = node.replaceChildren(newChildren); - assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, newCollection); - } else { - // The arg we're looking at has nothing to do with the children - } - } else { - if (node.children().contains(originalArgValue)) { - // The arg we're looking at is one of the children - List newChildren = new ArrayList<>(node.children()); - @SuppressWarnings("unchecked") // makeArg produced reasonable values - B newChild = (B) randomValueOtherThan(nodeCtorArgs[changedArgOffset], () -> makeArg(changedArgType)); - newChildren.replaceAll(e -> Objects.equals(originalArgValue, e) ? newChild : e); - B transformed = node.replaceChildren(newChildren); - assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, newChild); - } else { - // The arg we're looking at has nothing to do with the children - } - } - } - } - - private void assertTransformedOrReplacedChildren( - T node, - B transformed, - Constructor ctor, - Object[] nodeCtorArgs, - int changedArgOffset, - Object changedArgValue - ) throws Exception { - if (node instanceof Function) { - /* - * Functions have a weaker definition of transform then other - * things: - * - * Transforming using the way we did above should only change - * the one property of the node that we intended to transform. 
- */ - assertEquals(node.source(), transformed.source()); - List op = node.nodeProperties(); - List tp = transformed.nodeProperties(); - for (int p = 0; p < op.size(); p++) { - if (p == changedArgOffset - 1) { // -1 because location isn't in the list - assertEquals(changedArgValue, tp.get(p)); - } else { - assertEquals(op.get(p), tp.get(p)); - } - } - } else { - /* - * The stronger assertion for all non-Functions: transforming - * a node changes *only* the transformed value such that you - * can rebuild a copy of the node using its constructor changing - * only one argument and it'll be *equal* to the result of the - * transformation. - */ - Type[] argTypes = ctor.getGenericParameterTypes(); - Object[] args = new Object[argTypes.length]; - for (int i = 0; i < argTypes.length; i++) { - args[i] = nodeCtorArgs[i] == nodeCtorArgs[changedArgOffset] ? changedArgValue : nodeCtorArgs[i]; - } - T reflectionTransformed = ctor.newInstance(args); - assertEquals(reflectionTransformed, transformed); - } - } - - /** - * Find the longest constructor of the given class. - * By convention, for all subclasses of {@link Node}, - * this constructor should have "all" of the state of - * the node. All other constructors should all delegate - * to this constructor. - */ - static Constructor longestCtor(Class clazz) { - Constructor longest = null; - for (Constructor ctor : clazz.getConstructors()) { - if (longest == null || longest.getParameterCount() < ctor.getParameterCount()) { - @SuppressWarnings("unchecked") // Safe because the ctor has to be a ctor for T - Constructor castCtor = (Constructor) ctor; - longest = castCtor; - } - } - if (longest == null) { - throw new IllegalArgumentException("Couldn't find any constructors for [" + clazz.getName() + "]"); - } - return longest; - } - - /** - * Scans the {@code .class} files to identify all classes and - * checks if they are subclasses of {@link Node}. - */ - @ParametersFactory - @SuppressWarnings("rawtypes") - public static List nodeSubclasses() throws IOException { - return subclassesOf(Node.class, CLASSNAME_FILTER).stream() - .filter(c -> testClassFor(c) == null) - .map(c -> new Object[] { c }) - .toList(); - } - - /** - * Build a list of arguments to use when calling - * {@code ctor} that make sense when {@code ctor} - * builds subclasses of {@link Node}. - */ - private Object[] ctorArgs(Constructor> ctor) throws Exception { - Type[] argTypes = ctor.getGenericParameterTypes(); - Object[] args = new Object[argTypes.length]; - for (int i = 0; i < argTypes.length; i++) { - final int currentArgIndex = i; - args[i] = randomValueOtherThanMany(candidate -> { - for (int a = 0; a < currentArgIndex; a++) { - if (Objects.equals(args[a], candidate)) { - return true; - } - } - return false; - }, () -> { - try { - return makeArg(ctor.getDeclaringClass(), argTypes[currentArgIndex]); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - return args; - } - - /** - * Make an argument to feed the {@link #subclass}'s ctor. - */ - protected Object makeArg(Type argType) { - try { - return makeArg(subclass, argType); - } catch (Exception e) { - // Wrap to make `randomValueOtherThan` happy. - throw new RuntimeException(e); - } - } - - /** - * Make an argument to feed to the constructor for {@code toBuildClass}. 
- */ - @SuppressWarnings("unchecked") - private Object makeArg(Class> toBuildClass, Type argType) throws Exception { - - if (argType instanceof ParameterizedType pt) { - if (pt.getRawType() == Map.class) { - return makeMap(toBuildClass, pt); - } - if (pt.getRawType() == List.class) { - return makeList(toBuildClass, pt); - } - if (pt.getRawType() == Set.class) { - return makeSet(toBuildClass, pt); - } - if (pt.getRawType() == EnumSet.class) { - @SuppressWarnings("rawtypes") - Enum enm = (Enum) makeArg(toBuildClass, pt.getActualTypeArguments()[0]); - return EnumSet.of(enm); - } - Object obj = pluggableMakeParameterizedArg(toBuildClass, pt); - if (obj != null) { - return obj; - } - throw new IllegalArgumentException("Unsupported parameterized type [" + pt + "], for " + toBuildClass.getSimpleName()); - } - if (argType instanceof WildcardType wt) { - if (wt.getLowerBounds().length > 0 || wt.getUpperBounds().length > 1) { - throw new IllegalArgumentException("Unsupported wildcard type [" + wt + "]"); - } - return makeArg(toBuildClass, wt.getUpperBounds()[0]); - } - Class argClass = (Class) argType; - - /* - * Sometimes all of the required type information isn't in the ctor - * so we have to hard code it here. - */ - if (toBuildClass == FieldAttribute.class) { - // `parent` is nullable. - if (argClass == FieldAttribute.class && randomBoolean()) { - return null; - } - } else if (toBuildClass == ChildrenAreAProperty.class) { - /* - * While any subclass of DummyFunction will do here we want to prevent - * stack overflow so we use the one without children. - */ - if (argClass == Dummy.class) { - return makeNode(NoChildren.class); - } - } else if (FullTextPredicate.class.isAssignableFrom(toBuildClass)) { - /* - * FullTextPredicate analyzes its string arguments on - * construction so they have to be valid. - */ - if (argClass == String.class) { - int size = between(0, 5); - StringBuilder b = new StringBuilder(); - for (int i = 0; i < size; i++) { - if (i != 0) { - b.append(';'); - } - b.append(randomAlphaOfLength(5)).append('=').append(randomAlphaOfLength(5)); - } - return b.toString(); - } - } else if (toBuildClass == Like.class) { - - if (argClass == LikePattern.class) { - return new LikePattern(randomAlphaOfLength(16), randomFrom('\\', '|', '/', '`')); - } - - } else { - Object postProcess = pluggableMakeArg(toBuildClass, argClass); - if (postProcess != null) { - return postProcess; - } - } - if (Expression.class == argClass) { - /* - * Rather than use any old subclass of expression lets - * use a simple one. Without this we're very prone to - * stackoverflow errors while building the tree. - */ - return UnresolvedAttributeTests.randomUnresolvedAttribute(); - } - if (EnrichPolicy.class == argClass) { - List enrichFields = randomSubsetOf(List.of("e1", "e2", "e3")); - return new EnrichPolicy(randomFrom("match", "range"), null, List.of(), randomFrom("m1", "m2"), enrichFields); - } - - if (Node.class.isAssignableFrom(argClass)) { - /* - * Rather than attempting to mock subclasses of node - * and emulate them we just try and instantiate an - * appropriate subclass - */ - @SuppressWarnings("unchecked") // safe because this is the lowest possible bounds for Node - Class> asNodeSubclass = (Class>) argType; - return makeNode(asNodeSubclass); - } - - if (argClass.isEnum()) { - // Can't mock enums but luckily we can just pick one - return randomFrom(argClass.getEnumConstants()); - } - if (argClass == boolean.class) { - // Can't mock primitives.... 
- return randomBoolean(); - } - if (argClass == int.class) { - return randomInt(); - } - if (argClass == String.class) { - // Nor strings - return randomAlphaOfLength(5); - } - if (argClass == Source.class) { - // Location is final and can't be mocked but we have a handy method to generate ones. - return SourceTests.randomSource(); - } - if (argClass == ZoneId.class) { - // ZoneId is a sealed class (cannot be mocked) starting with Java 19 - return randomZone(); - } - try { - return mock(argClass); - } catch (MockitoException e) { - throw new RuntimeException("failed to mock [" + argClass.getName() + "] for [" + toBuildClass.getName() + "]", e); - } - } - - protected Object pluggableMakeArg(Class> toBuildClass, Class argClass) throws Exception { - return null; - } - - protected Object pluggableMakeParameterizedArg(Class> toBuildClass, ParameterizedType pt) { - return null; - } - - private List makeList(Class> toBuildClass, ParameterizedType listType) throws Exception { - return makeList(toBuildClass, listType, randomSizeForCollection(toBuildClass)); - } - - private List makeList(Class> toBuildClass, ParameterizedType listType, int size) throws Exception { - List list = new ArrayList<>(); - for (int i = 0; i < size; i++) { - list.add(makeArg(toBuildClass, listType.getActualTypeArguments()[0])); - } - return list; - } - - private Set makeSet(Class> toBuildClass, ParameterizedType listType) throws Exception { - return makeSet(toBuildClass, listType, randomSizeForCollection(toBuildClass)); - } - - private Set makeSet(Class> toBuildClass, ParameterizedType listType, int size) throws Exception { - Set list = new HashSet<>(); - for (int i = 0; i < size; i++) { - list.add(makeArg(toBuildClass, listType.getActualTypeArguments()[0])); - } - return list; - } - - private Object makeMap(Class> toBuildClass, ParameterizedType pt) throws Exception { - Map map = new HashMap<>(); - int size = randomSizeForCollection(toBuildClass); - while (map.size() < size) { - Object key = makeArg(toBuildClass, pt.getActualTypeArguments()[0]); - Object value = makeArg(toBuildClass, pt.getActualTypeArguments()[1]); - map.put(key, value); - } - return map; - } - - private int randomSizeForCollection(Class> toBuildClass) { - int minCollectionLength = 0; - int maxCollectionLength = 10; - - if (hasAtLeastTwoChildren(toBuildClass)) { - minCollectionLength = 2; - } - return between(minCollectionLength, maxCollectionLength); - } - - protected boolean hasAtLeastTwoChildren(Class> toBuildClass) { - return CLASSES_WITH_MIN_TWO_CHILDREN.stream().anyMatch(toBuildClass::equals); - } - - private List makeListOfSameSizeOtherThan(Type listType, List original) throws Exception { - if (original.isEmpty()) { - throw new IllegalArgumentException("Can't make a different empty list"); - } - return randomValueOtherThan(original, () -> { - try { - return makeList(subclass, (ParameterizedType) listType, original.size()); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - - } - - public > T makeNode(Class nodeClass) throws Exception { - if (Modifier.isAbstract(nodeClass.getModifiers())) { - nodeClass = randomFrom(innerSubclassesOf(nodeClass)); - } - Class testSubclassFor = testClassFor(nodeClass); - if (testSubclassFor != null) { - // Delegate to the test class for a node if there is one - Method m = testSubclassFor.getMethod("random" + Strings.capitalize(nodeClass.getSimpleName())); - assert Modifier.isStatic(m.getModifiers()) : "Expected static method, got:" + m; - return nodeClass.cast(m.invoke(null)); - } - Constructor ctor 
= longestCtor(nodeClass); - Object[] nodeCtorArgs = ctorArgs(ctor); - return ctor.newInstance(nodeCtorArgs); - } - - /** - * Cache of subclasses. We use a cache because it significantly speeds up - * the test. - */ - private static final Map, Set> subclassCache = new HashMap<>(); - - private static final Predicate CLASSNAME_FILTER = className -> { - // filter the class that are not interested - // (and IDE folders like eclipse) - if (className.startsWith("org.elasticsearch.xpack.esql.core") == false - && className.startsWith("org.elasticsearch.xpack.sql") == false - && className.startsWith("org.elasticsearch.xpack.eql") == false) { - return false; - } - return true; - }; - - protected Predicate pluggableClassNameFilter() { - return CLASSNAME_FILTER; - } - - private Set> innerSubclassesOf(Class clazz) throws IOException { - return subclassesOf(clazz, pluggableClassNameFilter()); - } - - public static Set> subclassesOf(Class clazz) throws IOException { - return subclassesOf(clazz, CLASSNAME_FILTER); - } - - /** - * Find all subclasses of a particular class. - */ - public static Set> subclassesOf(Class clazz, Predicate classNameFilter) throws IOException { - @SuppressWarnings("unchecked") // The map is built this way - Set> lookup = (Set>) subclassCache.get(clazz); - if (lookup != null) { - return lookup; - } - Set> results = new LinkedHashSet<>(); - String[] paths = System.getProperty("java.class.path").split(System.getProperty("path.separator")); - for (String path : paths) { - Path root = PathUtils.get(path); - int rootLength = root.toString().length() + 1; - - // load classes from jar files - // NIO FileSystem API is not used since it trips the SecurityManager - // https://bugs.openjdk.java.net/browse/JDK-8160798 - // so iterate the jar "by hand" - if (path.endsWith(".jar") && path.contains("x-pack-ql")) { - try (JarInputStream jar = jarStream(root)) { - JarEntry je = null; - while ((je = jar.getNextJarEntry()) != null) { - String name = je.getName(); - if (name.endsWith(".class")) { - String className = name.substring(0, name.length() - ".class".length()).replace("/", "."); - maybeLoadClass(clazz, className, root + "!/" + name, classNameFilter, results); - } - } - } - } - // for folders, just use the FileSystems API - else { - Files.walkFileTree(root, new SimpleFileVisitor<>() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - if (Files.isRegularFile(file) && file.getFileName().toString().endsWith(".class")) { - String fileName = file.toString(); - // Chop off the root and file extension - String className = fileName.substring(rootLength, fileName.length() - ".class".length()); - // Go from "path" style to class style - className = className.replace(PathUtils.getDefaultFileSystem().getSeparator(), "."); - maybeLoadClass(clazz, className, fileName, classNameFilter, results); - } - return FileVisitResult.CONTINUE; - } - }); - } - } - subclassCache.put(clazz, results); - return results; - } - - @SuppressForbidden(reason = "test reads from jar") - private static JarInputStream jarStream(Path path) throws IOException { - return new JarInputStream(path.toUri().toURL().openStream()); - } - - /** - * Load classes from predefined packages (hack to limit the scope) and if they match the hierarchy, add them to the cache - */ - private static void maybeLoadClass( - Class clazz, - String className, - String location, - Predicate classNameFilter, - Set> results - ) throws IOException { - if (classNameFilter.test(className) == false) { - 
return; - } - - Class c; - try { - c = Class.forName(className); - } catch (ClassNotFoundException e) { - throw new IOException("Couldn't load " + location, e); - } - - if (false == Modifier.isAbstract(c.getModifiers()) && false == c.isAnonymousClass() && clazz.isAssignableFrom(c)) { - Class s = c.asSubclass(clazz); - results.add(s); - } - } - - /** - * The test class for some subclass of node or {@code null} - * if there isn't such a class or it doesn't extend - * {@link AbstractNodeTestCase}. - */ - protected static Class testClassFor(Class nodeSubclass) { - String testClassName = nodeSubclass.getName() + "Tests"; - try { - Class c = Class.forName(testClassName); - if (AbstractNodeTestCase.class.isAssignableFrom(c)) { - return c; - } - return null; - } catch (ClassNotFoundException e) { - return null; - } - } - - private static T randomValueOtherThanManyMaxTries(Predicate input, Supplier randomSupplier, int maxTries) { - int[] maxTriesHolder = { maxTries }; - Predicate inputWithMaxTries = t -> input.test(t) && maxTriesHolder[0]-- > 0; +public class NodeSubclassTests extends ESTestCase { - return ESTestCase.randomValueOtherThanMany(inputWithMaxTries, randomSupplier); + // TODO once Node has been move to ESQL proper remove this shim and these methods. + protected final NodeInfo info(Node node) { + return node.info(); } - public static T randomValueOtherThanMaxTries(T input, Supplier randomSupplier, int maxTries) { - return randomValueOtherThanManyMaxTries(v -> Objects.equals(input, v), randomSupplier, maxTries); + protected final > T transformNodeProps(Node n, Class typeToken, Function rule) { + return n.transformNodeProps(typeToken, rule); } } diff --git a/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java b/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java index 35d73f87f2ceb..5f774ad9dd60e 100644 --- a/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java +++ b/x-pack/plugin/esql-core/test-fixtures/src/main/java/org/elasticsearch/xpack/esql/core/TestUtils.java @@ -36,8 +36,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; -import org.elasticsearch.xpack.esql.core.index.EsIndex; -import org.elasticsearch.xpack.esql.core.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.core.session.Configuration; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -165,10 +163,6 @@ public static FieldAttribute fieldAttribute(String name, DataType type) { return new FieldAttribute(EMPTY, name, new EsField(name, type, emptyMap(), randomBoolean())); } - public static EsRelation relation() { - return new EsRelation(EMPTY, new EsIndex(randomAlphaOfLength(8), emptyMap()), randomBoolean()); - } - // // Common methods / assertions // diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index c213afae8b01c..1694115aaa71d 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -310,7 +310,3 @@ tasks.named('stringTemplates').configure { it.outputFile = "org/elasticsearch/xpack/esql/enrich/EnrichResultBuilderForBoolean.java" } } - -tasks.withType(CheckForbiddenApisTask).configureEach { - signaturesFiles += 
files('src/main/resources/forbidden/ql-signatures.txt') -} diff --git a/x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt b/x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt deleted file mode 100644 index 5371b35f4e033..0000000000000 --- a/x-pack/plugin/esql/src/main/resources/forbidden/ql-signatures.txt +++ /dev/null @@ -1,5 +0,0 @@ -org.elasticsearch.xpack.esql.core.plan.logical.Aggregate @ use @org.elasticsearch.xpack.esql.plan.logical.Aggregate instead -org.elasticsearch.xpack.esql.core.plan.logical.EsRelation @ use @org.elasticsearch.xpack.esql.plan.logical.EsRelation instead -org.elasticsearch.xpack.esql.core.plan.logical.Project @ use @org.elasticsearch.xpack.esql.plan.logical.Project instead -org.elasticsearch.xpack.esql.core.plan.logical.UnresolvedRelation @ use @org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation instead -org.elasticsearch.xpack.esql.core.expression.function.Functions @ use @org.elasticsearch.xpack.esql.expression.function.Functions instead diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index adacc80ea12d2..9e2262e218236 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -9,20 +9,34 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.Strings; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.dissect.DissectParser; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Order; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; +import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttributeTests; import org.elasticsearch.xpack.esql.core.expression.UnresolvedNamedExpression; +import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; +import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.tree.AbstractNodeTestCase; import org.elasticsearch.xpack.esql.core.tree.Node; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.NodeSubclassTests; +import org.elasticsearch.xpack.esql.core.tree.NodeTests; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.tree.SourceTests; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; @@ -36,18 +50,85 @@ import 
org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.mockito.exceptions.base.MockitoException; import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.lang.reflect.WildcardType; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.function.Consumer; import java.util.function.Predicate; +import java.util.function.Supplier; +import java.util.jar.JarEntry; +import java.util.jar.JarInputStream; + +import static java.util.Collections.emptyList; +import static org.mockito.Mockito.mock; + +/** + * Looks for all subclasses of {@link Node} and verifies that they + * implement {@code Node.info} and + * {@link Node#replaceChildren(List)} sanely. It'd be better if + * each subclass had its own test case that verified those methods + * and any other interesting things that they do, but we're a + * long way from that and this gets the job done for now. + *

<p>
+ * This test attempts to use reflection to create believable nodes + * and manipulate them in believable ways with as little knowledge + * of the actual subclasses as possible. This is problematic because + * it is possible, for example, for nodes to stackoverflow because + * they can contain themselves. So this class + * does have some {@link Node}-subclass-specific + * knowledge. As little as I could get away with though. + * <p>
+ * When there are actual tests for a subclass of {@linkplain Node} + * then this class will do two things: + * <ul>
+ * <li>Skip running any tests for that subclass entirely.
+ * <li>Delegate to that test to build nodes of that type when a + * node of that type is called for.
+ * </ul>
    + */ +public class EsqlNodeSubclassTests> extends NodeSubclassTests { + private static final Predicate CLASSNAME_FILTER = className -> { + boolean esqlCore = className.startsWith("org.elasticsearch.xpack.esql.core") != false; + boolean esqlProper = className.startsWith("org.elasticsearch.xpack.esql") != false; + return esqlCore || esqlProper; + }; + + /** + * Scans the {@code .class} files to identify all classes and checks if + * they are subclasses of {@link Node}. + */ + @ParametersFactory(argumentFormatting = "%1s") + @SuppressWarnings("rawtypes") + public static List nodeSubclasses() throws IOException { + return subclassesOf(Node.class, CLASSNAME_FILTER).stream() + .filter(c -> testClassFor(c) == null) + .map(c -> new Object[] { c }) + .toList(); + } -public class EsqlNodeSubclassTests> extends NodeSubclassTests { private static final List> CLASSES_WITH_MIN_TWO_CHILDREN = List.of(Concat.class, CIDRMatch.class); // List of classes that are "unresolved" NamedExpression subclasses, therefore not suitable for use with logical/physical plan nodes. @@ -58,13 +139,276 @@ public class EsqlNodeSubclassTests> extends NodeS UnresolvedNamedExpression.class ); + private final Class subclass; + public EsqlNodeSubclassTests(Class subclass) { - super(subclass); + this.subclass = subclass; + } + + public void testInfoParameters() throws Exception { + Constructor ctor = longestCtor(subclass); + Object[] nodeCtorArgs = ctorArgs(ctor); + T node = ctor.newInstance(nodeCtorArgs); + /* + * The count should be the same size as the longest constructor + * by convention. If it isn't then we're missing something. + */ + int expectedCount = ctor.getParameterCount(); + /* + * Except the first `Location` argument of the ctor is implicit + * in the parameters and not included. + */ + expectedCount -= 1; + assertEquals(expectedCount, info(node).properties().size()); + } + + /** + * Test {@code Node.transformPropertiesOnly} + * implementation on {@link #subclass} which tests the implementation of + * {@code Node.info}. And tests the actual {@link NodeInfo} subclass + * implementations in the process. + */ + public void testTransform() throws Exception { + Constructor ctor = longestCtor(subclass); + Object[] nodeCtorArgs = ctorArgs(ctor); + T node = ctor.newInstance(nodeCtorArgs); + + Type[] argTypes = ctor.getGenericParameterTypes(); + // start at 1 because we can't change Location. + for (int changedArgOffset = 1; changedArgOffset < ctor.getParameterCount(); changedArgOffset++) { + Object originalArgValue = nodeCtorArgs[changedArgOffset]; + + Type changedArgType = argTypes[changedArgOffset]; + Object changedArgValue = randomValueOtherThanMaxTries( + nodeCtorArgs[changedArgOffset], + () -> makeArg(changedArgType), + // JoinType has only 1 permitted enum element. Limit the number of retries. + 3 + ); + + B transformed = transformNodeProps(node, Object.class, prop -> Objects.equals(prop, originalArgValue) ? changedArgValue : prop); + + if (node.children().contains(originalArgValue) || node.children().equals(originalArgValue)) { + if (node.children().equals(emptyList()) && originalArgValue.equals(emptyList())) { + /* + * If the children are an empty list and the value + * we want to change is an empty list they'll be + * equal to one another so they'll come on this branch. + * This case is rare and hard to reason about so we're + * just going to assert nothing here and hope to catch + * it when we write non-reflection hack tests. + */ + continue; + } + // Transformation shouldn't apply to children. 
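+ // E.g. for a unary plan shaped like Limit(source, limit, child) (a hypothetical,
+ // typical example): transforming a value equal to `child` must return the very same
+ // instance, because children are only swapped via replaceChildren, while transforming
+ // `limit` must produce a new node that differs in exactly that property.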
+ assertSame(node, transformed); + } else { + assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, changedArgValue); + } + } + } + + /** + * Test {@link Node#replaceChildren(List)} implementation on {@link #subclass}. + */ + public void testReplaceChildren() throws Exception { + Constructor ctor = longestCtor(subclass); + Object[] nodeCtorArgs = ctorArgs(ctor); + T node = ctor.newInstance(nodeCtorArgs); + + Type[] argTypes = ctor.getGenericParameterTypes(); + // start at 1 because we can't change Location. + for (int changedArgOffset = 1; changedArgOffset < ctor.getParameterCount(); changedArgOffset++) { + Object originalArgValue = nodeCtorArgs[changedArgOffset]; + Type changedArgType = argTypes[changedArgOffset]; + + if (originalArgValue instanceof Collection col) { + + if (col.isEmpty() || col instanceof EnumSet) { + /* + * We skip empty lists here because they'll spuriously + * pass the conditions below if statements even if they don't + * have anything to do with children. This might cause us to + * ignore the case where a parameter gets copied into the + * children and just happens to be empty but I don't really + * know another way. + */ + + continue; + } + + if (col instanceof List originalList && node.children().equals(originalList)) { + // The arg we're looking at *is* the children + @SuppressWarnings("unchecked") // we pass a reasonable type so get reasonable results + List newChildren = (List) makeListOfSameSizeOtherThan(changedArgType, originalList); + B transformed = node.replaceChildren(newChildren); + assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, newChildren); + } else if (false == col.isEmpty() && node.children().containsAll(col)) { + // The arg we're looking at is a collection contained within the children + List originalList = (List) originalArgValue; + + // First make the new children + @SuppressWarnings("unchecked") // we pass a reasonable type so get reasonable results + List newCollection = (List) makeListOfSameSizeOtherThan(changedArgType, originalList); + + // Now merge that list of children into the original list of children + List originalChildren = node.children(); + List newChildren = new ArrayList<>(originalChildren.size()); + int originalOffset = 0; + for (int i = 0; i < originalChildren.size(); i++) { + if (originalOffset < originalList.size() && originalChildren.get(i).equals(originalList.get(originalOffset))) { + newChildren.add(newCollection.get(originalOffset)); + originalOffset++; + } else { + newChildren.add(originalChildren.get(i)); + } + } + + // Finally! We can assert..... + B transformed = node.replaceChildren(newChildren); + assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, newCollection); + } else { + // The arg we're looking at has nothing to do with the children + } + } else { + if (node.children().contains(originalArgValue)) { + // The arg we're looking at is one of the children + List newChildren = new ArrayList<>(node.children()); + @SuppressWarnings("unchecked") // makeArg produced reasonable values + B newChild = (B) randomValueOtherThan(nodeCtorArgs[changedArgOffset], () -> makeArg(changedArgType)); + newChildren.replaceAll(e -> Objects.equals(originalArgValue, e) ? 
newChild : e); + B transformed = node.replaceChildren(newChildren); + assertTransformedOrReplacedChildren(node, transformed, ctor, nodeCtorArgs, changedArgOffset, newChild); + } else { + // The arg we're looking at has nothing to do with the children + } + } + } + } + + /** + * Build a list of arguments to use when calling + * {@code ctor} that make sense when {@code ctor} + * builds subclasses of {@link Node}. + */ + private Object[] ctorArgs(Constructor> ctor) throws Exception { + Type[] argTypes = ctor.getGenericParameterTypes(); + Object[] args = new Object[argTypes.length]; + for (int i = 0; i < argTypes.length; i++) { + final int currentArgIndex = i; + args[i] = randomValueOtherThanMany(candidate -> { + for (int a = 0; a < currentArgIndex; a++) { + if (Objects.equals(args[a], candidate)) { + return true; + } + } + return false; + }, () -> { + try { + return makeArg(ctor.getDeclaringClass(), argTypes[currentArgIndex]); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + return args; + } + + /** + * Make an argument to feed the {@link #subclass}'s ctor. + */ + protected Object makeArg(Type argType) { + try { + return makeArg(subclass, argType); + } catch (Exception e) { + // Wrap to make `randomValueOtherThan` happy. + throw new RuntimeException(e); + } } - @Override - protected Object pluggableMakeArg(Class> toBuildClass, Class argClass) throws Exception { - if (argClass == Dissect.Parser.class) { + /** + * Make an argument to feed to the constructor for {@code toBuildClass}. + */ + @SuppressWarnings("unchecked") + private Object makeArg(Class> toBuildClass, Type argType) throws Exception { + + if (argType instanceof ParameterizedType pt) { + if (pt.getRawType() == Map.class) { + return makeMap(toBuildClass, pt); + } + if (pt.getRawType() == List.class) { + return makeList(toBuildClass, pt); + } + if (pt.getRawType() == Set.class) { + return makeSet(toBuildClass, pt); + } + if (pt.getRawType() == EnumSet.class) { + @SuppressWarnings("rawtypes") + Enum enm = (Enum) makeArg(toBuildClass, pt.getActualTypeArguments()[0]); + return EnumSet.of(enm); + } + if (toBuildClass == OutputExec.class && pt.getRawType() == Consumer.class) { + // pageConsumer just needs a BiConsumer. But the consumer has to have reasonable + // `equals` for randomValueOtherThan, so we just ensure that a new instance is + // created each time which uses Object::equals identity. + return new Consumer() { + @Override + public void accept(Page page) { + // do nothing + } + }; + } + + throw new IllegalArgumentException("Unsupported parameterized type [" + pt + "], for " + toBuildClass.getSimpleName()); + } + if (argType instanceof WildcardType wt) { + if (wt.getLowerBounds().length > 0 || wt.getUpperBounds().length > 1) { + throw new IllegalArgumentException("Unsupported wildcard type [" + wt + "]"); + } + return makeArg(toBuildClass, wt.getUpperBounds()[0]); + } + Class argClass = (Class) argType; + + /* + * Sometimes all of the required type information isn't in the ctor + * so we have to hard code it here. + */ + if (toBuildClass == FieldAttribute.class) { + // `parent` is nullable. + if (argClass == FieldAttribute.class && randomBoolean()) { + return null; + } + } else if (toBuildClass == NodeTests.ChildrenAreAProperty.class) { + /* + * While any subclass of DummyFunction will do here we want to prevent + * stack overflow so we use the one without children. 
+ */ + if (argClass == NodeTests.Dummy.class) { + return makeNode(NodeTests.NoChildren.class); + } + } else if (FullTextPredicate.class.isAssignableFrom(toBuildClass)) { + /* + * FullTextPredicate analyzes its string arguments on + * construction so they have to be valid. + */ + if (argClass == String.class) { + int size = between(0, 5); + StringBuilder b = new StringBuilder(); + for (int i = 0; i < size; i++) { + if (i != 0) { + b.append(';'); + } + b.append(randomAlphaOfLength(5)).append('=').append(randomAlphaOfLength(5)); + } + return b.toString(); + } + } else if (toBuildClass == Like.class) { + + if (argClass == LikePattern.class) { + return new LikePattern(randomAlphaOfLength(16), randomFrom('\\', '|', '/', '`')); + } + + } else if (argClass == Dissect.Parser.class) { // Dissect.Parser is a record / final, cannot be mocked String pattern = randomDissectPattern(); String appendSeparator = randomAlphaOfLength(16); @@ -86,47 +430,203 @@ protected Object pluggableMakeArg(Class> toBuildClass, Class enrichFields = randomSubsetOf(List.of("e1", "e2", "e3")); + return new EnrichPolicy(randomFrom("match", "range"), null, List.of(), randomFrom("m1", "m2"), enrichFields); + } - return null; + if (Node.class.isAssignableFrom(argClass)) { + /* + * Rather than attempting to mock subclasses of node + * and emulate them we just try and instantiate an + * appropriate subclass + */ + @SuppressWarnings("unchecked") // safe because this is the lowest possible bounds for Node + Class> asNodeSubclass = (Class>) argType; + return makeNode(asNodeSubclass); + } + + if (argClass.isEnum()) { + // Can't mock enums but luckily we can just pick one + return randomFrom(argClass.getEnumConstants()); + } + if (argClass == boolean.class) { + // Can't mock primitives.... + return randomBoolean(); + } + if (argClass == int.class) { + return randomInt(); + } + if (argClass == String.class) { + // Nor strings + return randomAlphaOfLength(5); + } + if (argClass == Source.class) { + // Location is final and can't be mocked but we have a handy method to generate ones. + return SourceTests.randomSource(); + } + if (argClass == ZoneId.class) { + // ZoneId is a sealed class (cannot be mocked) starting with Java 19 + return randomZone(); + } + try { + return mock(argClass); + } catch (MockitoException e) { + throw new RuntimeException("failed to mock [" + argClass.getName() + "] for [" + toBuildClass.getName() + "]", e); + } } - @Override - protected Object pluggableMakeParameterizedArg(Class> toBuildClass, ParameterizedType pt) { - if (toBuildClass == OutputExec.class && pt.getRawType() == Consumer.class) { - // pageConsumer just needs a BiConsumer. But the consumer has to have reasonable - // `equals` for randomValueOtherThan, so we just ensure that a new instance is - // created each time which uses Object::equals identity. 
- return new Consumer() { - @Override - public void accept(Page page) { - // do nothing - } - }; + private List makeList(Class> toBuildClass, ParameterizedType listType) throws Exception { + return makeList(toBuildClass, listType, randomSizeForCollection(toBuildClass)); + } + + private List makeList(Class> toBuildClass, ParameterizedType listType, int size) throws Exception { + List list = new ArrayList<>(); + for (int i = 0; i < size; i++) { + list.add(makeArg(toBuildClass, listType.getActualTypeArguments()[0])); } - return null; + return list; } - @Override - protected boolean hasAtLeastTwoChildren(Class> toBuildClass) { - return CLASSES_WITH_MIN_TWO_CHILDREN.stream().anyMatch(toBuildClass::equals); + private Set makeSet(Class> toBuildClass, ParameterizedType listType) throws Exception { + return makeSet(toBuildClass, listType, randomSizeForCollection(toBuildClass)); } - static final Predicate CLASSNAME_FILTER = className -> (className.startsWith("org.elasticsearch.xpack.esql.core") != false - || className.startsWith("org.elasticsearch.xpack.esql") != false); + private Set makeSet(Class> toBuildClass, ParameterizedType listType, int size) throws Exception { + Set list = new HashSet<>(); + for (int i = 0; i < size; i++) { + list.add(makeArg(toBuildClass, listType.getActualTypeArguments()[0])); + } + return list; + } - @Override - protected Predicate pluggableClassNameFilter() { - return CLASSNAME_FILTER; + private Object makeMap(Class> toBuildClass, ParameterizedType pt) throws Exception { + Map map = new HashMap<>(); + int size = randomSizeForCollection(toBuildClass); + while (map.size() < size) { + Object key = makeArg(toBuildClass, pt.getActualTypeArguments()[0]); + Object value = makeArg(toBuildClass, pt.getActualTypeArguments()[1]); + map.put(key, value); + } + return map; } - /** Scans the {@code .class} files to identify all classes and checks if they are subclasses of {@link Node}. 
*/ - @ParametersFactory(argumentFormatting = "%1s") - @SuppressWarnings("rawtypes") - public static List nodeSubclasses() throws IOException { - return subclassesOf(Node.class, CLASSNAME_FILTER).stream() - .filter(c -> testClassFor(c) == null) - .map(c -> new Object[] { c }) - .toList(); + private int randomSizeForCollection(Class> toBuildClass) { + int minCollectionLength = 0; + int maxCollectionLength = 10; + + if (hasAtLeastTwoChildren(toBuildClass)) { + minCollectionLength = 2; + } + return between(minCollectionLength, maxCollectionLength); + } + + private List makeListOfSameSizeOtherThan(Type listType, List original) throws Exception { + if (original.isEmpty()) { + throw new IllegalArgumentException("Can't make a different empty list"); + } + return randomValueOtherThan(original, () -> { + try { + return makeList(subclass, (ParameterizedType) listType, original.size()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + } + + public > T makeNode(Class nodeClass) throws Exception { + if (Modifier.isAbstract(nodeClass.getModifiers())) { + nodeClass = randomFrom(innerSubclassesOf(nodeClass)); + } + Class testSubclassFor = testClassFor(nodeClass); + if (testSubclassFor != null) { + // Delegate to the test class for a node if there is one + Method m = testSubclassFor.getMethod("random" + Strings.capitalize(nodeClass.getSimpleName())); + assert Modifier.isStatic(m.getModifiers()) : "Expected static method, got:" + m; + return nodeClass.cast(m.invoke(null)); + } + Constructor ctor = longestCtor(nodeClass); + Object[] nodeCtorArgs = ctorArgs(ctor); + return ctor.newInstance(nodeCtorArgs); + } + + private void assertTransformedOrReplacedChildren( + T node, + B transformed, + Constructor ctor, + Object[] nodeCtorArgs, + int changedArgOffset, + Object changedArgValue + ) throws Exception { + if (node instanceof Function) { + /* + * Functions have a weaker definition of transform then other + * things: + * + * Transforming using the way we did above should only change + * the one property of the node that we intended to transform. + */ + assertEquals(node.source(), transformed.source()); + List op = node.nodeProperties(); + List tp = transformed.nodeProperties(); + for (int p = 0; p < op.size(); p++) { + if (p == changedArgOffset - 1) { // -1 because location isn't in the list + assertEquals(changedArgValue, tp.get(p)); + } else { + assertEquals(op.get(p), tp.get(p)); + } + } + } else { + /* + * The stronger assertion for all non-Functions: transforming + * a node changes *only* the transformed value such that you + * can rebuild a copy of the node using its constructor changing + * only one argument and it'll be *equal* to the result of the + * transformation. + */ + Type[] argTypes = ctor.getGenericParameterTypes(); + Object[] args = new Object[argTypes.length]; + for (int i = 0; i < argTypes.length; i++) { + args[i] = nodeCtorArgs[i] == nodeCtorArgs[changedArgOffset] ? changedArgValue : nodeCtorArgs[i]; + } + T reflectionTransformed = ctor.newInstance(args); + assertEquals(reflectionTransformed, transformed); + } + } + + /** + * Find the longest constructor of the given class. + * By convention, for all subclasses of {@link Node}, + * this constructor should have "all" of the state of + * the node. All other constructors should all delegate + * to this constructor. 
+ */ + static Constructor longestCtor(Class clazz) { + Constructor longest = null; + for (Constructor ctor : clazz.getConstructors()) { + if (longest == null || longest.getParameterCount() < ctor.getParameterCount()) { + @SuppressWarnings("unchecked") // Safe because the ctor has to be a ctor for T + Constructor castCtor = (Constructor) ctor; + longest = castCtor; + } + } + if (longest == null) { + throw new IllegalArgumentException("Couldn't find any constructors for [" + clazz.getName() + "]"); + } + return longest; + } + + private boolean hasAtLeastTwoChildren(Class> toBuildClass) { + return CLASSES_WITH_MIN_TWO_CHILDREN.stream().anyMatch(toBuildClass::equals); } static boolean isPlanNodeClass(Class> toBuildClass) { @@ -172,4 +672,132 @@ static EsQueryExec.FieldSort randomFieldSort() { static FieldAttribute field(String name, DataType type) { return new FieldAttribute(Source.EMPTY, name, new EsField(name, type, Collections.emptyMap(), false)); } + + public static Set> subclassesOf(Class clazz) throws IOException { + return subclassesOf(clazz, CLASSNAME_FILTER); + } + + private Set> innerSubclassesOf(Class clazz) throws IOException { + return subclassesOf(clazz, CLASSNAME_FILTER); + } + + /** + * Cache of subclasses. We use a cache because it significantly speeds up + * the test. + */ + private static final Map, Set> subclassCache = new HashMap<>(); + + /** + * Find all subclasses of a particular class. + */ + public static Set> subclassesOf(Class clazz, Predicate classNameFilter) throws IOException { + @SuppressWarnings("unchecked") // The map is built this way + Set> lookup = (Set>) subclassCache.get(clazz); + if (lookup != null) { + return lookup; + } + Set> results = new LinkedHashSet<>(); + String[] paths = System.getProperty("java.class.path").split(System.getProperty("path.separator")); + for (String path : paths) { + Path root = PathUtils.get(path); + int rootLength = root.toString().length() + 1; + + // load classes from jar files + // NIO FileSystem API is not used since it trips the SecurityManager + // https://bugs.openjdk.java.net/browse/JDK-8160798 + // so iterate the jar "by hand" + if (path.endsWith(".jar") && path.contains("x-pack-ql")) { + try (JarInputStream jar = jarStream(root)) { + JarEntry je = null; + while ((je = jar.getNextJarEntry()) != null) { + String name = je.getName(); + if (name.endsWith(".class")) { + String className = name.substring(0, name.length() - ".class".length()).replace("/", "."); + maybeLoadClass(clazz, className, root + "!/" + name, classNameFilter, results); + } + } + } + } + // for folders, just use the FileSystems API + else { + Files.walkFileTree(root, new SimpleFileVisitor<>() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + if (Files.isRegularFile(file) && file.getFileName().toString().endsWith(".class")) { + String fileName = file.toString(); + // Chop off the root and file extension + String className = fileName.substring(rootLength, fileName.length() - ".class".length()); + // Go from "path" style to class style + className = className.replace(PathUtils.getDefaultFileSystem().getSeparator(), "."); + maybeLoadClass(clazz, className, fileName, classNameFilter, results); + } + return FileVisitResult.CONTINUE; + } + }); + } + } + subclassCache.put(clazz, results); + return results; + } + + @SuppressForbidden(reason = "test reads from jar") + private static JarInputStream jarStream(Path path) throws IOException { + return new JarInputStream(path.toUri().toURL().openStream()); + 
} + + /** + * Load classes from predefined packages (hack to limit the scope) and if they match the hierarchy, add them to the cache + */ + private static void maybeLoadClass( + Class clazz, + String className, + String location, + Predicate classNameFilter, + Set> results + ) throws IOException { + if (classNameFilter.test(className) == false) { + return; + } + + Class c; + try { + c = Class.forName(className); + } catch (ClassNotFoundException e) { + throw new IOException("Couldn't load " + location, e); + } + + if (false == Modifier.isAbstract(c.getModifiers()) && false == c.isAnonymousClass() && clazz.isAssignableFrom(c)) { + Class s = c.asSubclass(clazz); + results.add(s); + } + } + + /** + * The test class for some subclass of node or {@code null} + * if there isn't such a class or it doesn't extend + * {@link AbstractNodeTestCase}. + */ + protected static Class testClassFor(Class nodeSubclass) { + String testClassName = nodeSubclass.getName() + "Tests"; + try { + Class c = Class.forName(testClassName); + if (AbstractNodeTestCase.class.isAssignableFrom(c)) { + return c; + } + return null; + } catch (ClassNotFoundException e) { + return null; + } + } + + private static T randomValueOtherThanManyMaxTries(Predicate input, Supplier randomSupplier, int maxTries) { + int[] maxTriesHolder = { maxTries }; + Predicate inputWithMaxTries = t -> input.test(t) && maxTriesHolder[0]-- > 0; + + return ESTestCase.randomValueOtherThanMany(inputWithMaxTries, randomSupplier); + } + + public static T randomValueOtherThanMaxTries(T input, Supplier randomSupplier, int maxTries) { + return randomValueOtherThanManyMaxTries(v -> Objects.equals(input, v), randomSupplier, maxTries); + } } From 69ee6d731867113e93f4658261497ba6989b6ec3 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 3 Jul 2024 08:20:34 -0400 Subject: [PATCH 155/216] ESQL: Add javadocs for some of DataType (#110396) This adds some javadoc to a few of the methods on `DataType`. That's important because `DataType` is pretty central to how ESQL works and is referenced in tons of code. The method `isInteger` especially wants an explanation - it's true for all whole numbers. --- .../xpack/esql/core/type/DataType.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 0b43d517b8f1e..2dc141dd1bac0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -254,18 +254,31 @@ public String esType() { return esType; } + /** + * The name we give to types on the response. + */ public String outputType() { return esType == null ? "unsupported" : esType; } + /** + * Does this data type represent whole numbers? As in, numbers without a decimal point. + * Like {@code int} or {@code long}. See {@link #isRational} for numbers with a decimal point. + */ public boolean isInteger() { return isInteger; } + /** + * Does this data type represent rational numbers (like floating point)? + */ public boolean isRational() { return isRational; } + /** + * Does this data type represent any number? 
+ */ public boolean isNumeric() { return isInteger || isRational; } From c5e8173f2a89dd3f9f709ecd1c385572acae23d0 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 3 Jul 2024 14:14:49 +0100 Subject: [PATCH 156/216] Stricter failure handling in `TransportGetSnapshotsAction` (#107191) Today if there's a failure during a multi-repo get-snapshots request then we record a per-repository failure but allow the rest of the request to proceed. This is trappy for clients, it means that they must always remember to check the `failures` response field or else risk missing some results. It's also a pain for the implementation because it means we have to collect the per-repository results separately first before adding them to the final results set just in case the last one triggers a failure. This commit drops this leniency and bubbles all failures straight up to the top level. --- docs/changelog/107191.yaml | 17 +++++++++ .../snapshots/GetSnapshotsIT.java | 18 ++++++---- .../snapshots/SnapshotStatusApisIT.java | 10 ++---- .../snapshots/get/GetSnapshotsRequest.java | 8 ----- .../snapshots/get/GetSnapshotsResponse.java | 2 ++ .../get/TransportGetSnapshotsAction.java | 36 ++++++++----------- 6 files changed, 47 insertions(+), 44 deletions(-) create mode 100644 docs/changelog/107191.yaml diff --git a/docs/changelog/107191.yaml b/docs/changelog/107191.yaml new file mode 100644 index 0000000000000..5ef6297c0f3f1 --- /dev/null +++ b/docs/changelog/107191.yaml @@ -0,0 +1,17 @@ +pr: 107191 +summary: Stricter failure handling in multi-repo get-snapshots request handling +area: Snapshot/Restore +type: bug +issues: [] +highlight: + title: Stricter failure handling in multi-repo get-snapshots request handling + body: | + If a multi-repo get-snapshots request encounters a failure in one of the + targeted repositories then earlier versions of Elasticsearch would proceed + as if the faulty repository did not exist, except for a per-repository + failure report in a separate section of the response body. This makes it + impossible to paginate the results properly in the presence of failures. In + versions 8.15.0 and later this API's failure handling behaviour has been + made stricter, reporting an overall failure if any targeted repository's + contents cannot be listed. 
+ notable: true diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java index 7c5f38fee02a9..1130ddaa74f38 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/GetSnapshotsIT.java @@ -31,10 +31,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.in; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class GetSnapshotsIT extends AbstractSnapshotIntegTestCase { @@ -314,6 +312,7 @@ public void testExcludePatterns() throws Exception { assertThat( clusterAdmin().prepareGetSnapshots(TEST_REQUEST_TIMEOUT, matchAllPattern()) .setSnapshots("non-existing*", otherPrefixSnapshot1, "-o*") + .setIgnoreUnavailable(true) .get() .getSnapshots(), empty() @@ -586,12 +585,17 @@ public void testRetrievingSnapshotsWhenRepositoryIsMissing() throws Exception { final List snapshotNames = createNSnapshots(repoName, randomIntBetween(1, 10)); snapshotNames.sort(String::compareTo); - final GetSnapshotsResponse response = clusterAdmin().prepareGetSnapshots(TEST_REQUEST_TIMEOUT, repoName, missingRepoName) + final var oneRepoFuture = clusterAdmin().prepareGetSnapshots(TEST_REQUEST_TIMEOUT, repoName, missingRepoName) .setSort(SnapshotSortKey.NAME) - .get(); - assertThat(response.getSnapshots().stream().map(info -> info.snapshotId().getName()).toList(), equalTo(snapshotNames)); - assertTrue(response.getFailures().containsKey(missingRepoName)); - assertThat(response.getFailures().get(missingRepoName), instanceOf(RepositoryMissingException.class)); + .setIgnoreUnavailable(randomBoolean()) + .execute(); + expectThrows(RepositoryMissingException.class, oneRepoFuture::actionGet); + + final var multiRepoFuture = clusterAdmin().prepareGetSnapshots(TEST_REQUEST_TIMEOUT, repoName, missingRepoName) + .setSort(SnapshotSortKey.NAME) + .setIgnoreUnavailable(randomBoolean()) + .execute(); + expectThrows(RepositoryMissingException.class, multiRepoFuture::actionGet); } // Create a snapshot that is guaranteed to have a unique start time and duration for tests around ordering by either. 
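The behavioural change above is easiest to see from the caller's side. A minimal sketch, modelled on the updated integration tests in this patch (the repository names "repo" and "missing-repo" are hypothetical):

    // Sketch only: before this change a missing repository produced a per-repository
    // entry under the response's "failures" map; now the request fails as a whole.
    var future = clusterAdmin().prepareGetSnapshots(TEST_REQUEST_TIMEOUT, "repo", "missing-repo")
        .setIgnoreUnavailable(false)
        .execute();
    expectThrows(RepositoryMissingException.class, future::actionGet);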
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java index 600a3953d9bda..b155ef73783eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStatusApisIT.java @@ -52,7 +52,6 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.oneOf; @@ -395,16 +394,13 @@ public void testGetSnapshotsMultipleRepos() throws Exception { } logger.info("--> specify all snapshot names with ignoreUnavailable=false"); - GetSnapshotsResponse getSnapshotsResponse2 = client.admin() + final var failingFuture = client.admin() .cluster() .prepareGetSnapshots(TEST_REQUEST_TIMEOUT, randomFrom("_all", "repo*")) .setIgnoreUnavailable(false) .setSnapshots(snapshotList.toArray(new String[0])) - .get(); - - for (String repo : repoList) { - assertThat(getSnapshotsResponse2.getFailures().get(repo), instanceOf(SnapshotMissingException.class)); - } + .execute(); + expectThrows(SnapshotMissingException.class, failingFuture::actionGet); logger.info("--> specify all snapshot names with ignoreUnavailable=true"); GetSnapshotsResponse getSnapshotsResponse3 = client.admin() diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java index 8ef828d07d8b0..7c797444fc458 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.sort.SortOrder; @@ -220,13 +219,6 @@ public String[] policies() { return policies; } - public boolean isSingleRepositoryRequest() { - return repositories.length == 1 - && repositories[0] != null - && "_all".equals(repositories[0]) == false - && Regex.isSimpleMatchPattern(repositories[0]) == false; - } - /** * Returns the names of the snapshots. 
* diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java index 85c2ff2806ace..f7dedc21f93b6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.xcontent.ToXContent; @@ -33,6 +34,7 @@ public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXCo private final List snapshots; + @UpdateForV9 // always empty, can be dropped private final Map failures; @Nullable diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index dd08746236fed..ff5fdbaa787fe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.get; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; @@ -120,10 +119,14 @@ protected void masterOperation( ) { assert task instanceof CancellableTask : task + " not cancellable"; + final var resolvedRepositories = ResolvedRepositories.resolve(state, request.repositories()); + if (resolvedRepositories.hasMissingRepositories()) { + throw new RepositoryMissingException(String.join(", ", resolvedRepositories.missing())); + } + new GetSnapshotsOperation( (CancellableTask) task, - ResolvedRepositories.resolve(state, request.repositories()), - request.isSingleRepositoryRequest() == false, + resolvedRepositories.repositoryMetadata(), request.snapshots(), request.ignoreUnavailable(), request.policies(), @@ -151,7 +154,6 @@ private class GetSnapshotsOperation { // repositories private final List repositories; - private final boolean isMultiRepoRequest; // snapshots selection private final SnapshotNamePredicate snapshotNamePredicate; @@ -179,7 +181,6 @@ private class GetSnapshotsOperation { private final GetSnapshotInfoExecutor getSnapshotInfoExecutor; // results - private final Map failuresByRepository = ConcurrentCollections.newConcurrentMap(); private final Queue> allSnapshotInfos = ConcurrentCollections.newQueue(); /** @@ -195,8 +196,7 @@ private class GetSnapshotsOperation { GetSnapshotsOperation( CancellableTask cancellableTask, - ResolvedRepositories resolvedRepositories, - boolean isMultiRepoRequest, + List repositories, String[] snapshots, boolean ignoreUnavailable, String[] policies, @@ -211,8 +211,7 @@ private class GetSnapshotsOperation { boolean indices ) { this.cancellableTask = cancellableTask; - this.repositories = resolvedRepositories.repositoryMetadata(); - this.isMultiRepoRequest = isMultiRepoRequest; + this.repositories = repositories; this.ignoreUnavailable = ignoreUnavailable; this.sortBy 
= sortBy; this.order = order; @@ -232,10 +231,6 @@ private class GetSnapshotsOperation { threadPool.info(ThreadPool.Names.SNAPSHOT_META).getMax(), cancellableTask::isCancelled ); - - for (final var missingRepo : resolvedRepositories.missing()) { - failuresByRepository.put(missingRepo, new RepositoryMissingException(missingRepo)); - } } void getMultipleReposSnapshotInfo(ActionListener listener) { @@ -249,6 +244,10 @@ void getMultipleReposSnapshotInfo(ActionListener listener) continue; } + if (listeners.isFailing()) { + return; + } + SubscribableListener .newForked(repositoryDataListener -> { @@ -261,14 +260,7 @@ void getMultipleReposSnapshotInfo(ActionListener listener) .andThen((l, repositoryData) -> loadSnapshotInfos(repoName, repositoryData, l)) - .addListener(listeners.acquire().delegateResponse((l, e) -> { - if (isMultiRepoRequest && e instanceof ElasticsearchException elasticsearchException) { - failuresByRepository.put(repoName, elasticsearchException); - l.onResponse(null); - } else { - l.onFailure(e); - } - })); + .addListener(listeners.acquire()); } } }) @@ -503,7 +495,7 @@ private GetSnapshotsResponse buildResponse() { } return new GetSnapshotsResponse( snapshotInfos, - failuresByRepository, + null, remaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, totalCount.get(), remaining From b3233aac11e563c054480b3da165f41ddd24dd58 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 3 Jul 2024 15:22:32 +0200 Subject: [PATCH 157/216] ES|QL: Fix possible ClassCastException with ReplaceMissingFieldWithNull (#110373) Fix ReplaceMissingFieldWithNull by explicitly listing the commands that can be optimized by replacing missing FieldAttributes with NULL Literals. This PR also adds a unit test that demonstrates possible scenarios where introducing a new command can lead to `ClassCastException` with the `ReplaceMissingFieldWithNull` local optimization rule, and an integration test that covers https://github.com/elastic/elasticsearch/issues/109974 Fixes #110150 --- .../optimizer/LocalLogicalPlanOptimizer.java | 26 ++- .../LocalLogicalPlanOptimizerTests.java | 48 +++++ .../test/esql/170_no_replicas.yml | 181 ++++++++++++++++++ 3 files changed, 240 insertions(+), 15 deletions(-) create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/170_no_replicas.yml diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java index 90ce68cb55b64..ba5e8316a666c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java @@ -38,8 +38,8 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; -import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; @@ -163,20 +163,16 @@ else if (plan instanceof Project project) { plan = new Eval(project.source(), project.child(), new
ArrayList<>(nullLiteral.values())); plan = new Project(project.source(), plan, newProjections); } - } else if (plan instanceof MvExpand) { - // We cannot replace the target (NamedExpression) with a Literal - // https://github.com/elastic/elasticsearch/issues/109974 - // Unfortunately we cannot remove the MvExpand right away, or we'll lose the output field (layout problems) - // TODO but this could be a follow-up optimization - return plan; - } - // otherwise transform fields in place - else { - plan = plan.transformExpressionsOnlyUp( - FieldAttribute.class, - f -> stats.exists(f.qualifiedName()) ? f : Literal.of(f, null) - ); - } + } else if (plan instanceof Eval + || plan instanceof Filter + || plan instanceof OrderBy + || plan instanceof RegexExtract + || plan instanceof TopN) { + plan = plan.transformExpressionsOnlyUp( + FieldAttribute.class, + f -> stats.exists(f.qualifiedName()) ? f : Literal.of(f, null) + ); + } return plan; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 7a3ed09d66f02..af6c065abbeee 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -14,8 +14,10 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.core.expression.Alias; +import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; @@ -25,6 +27,9 @@ import org.elasticsearch.xpack.esql.core.plan.logical.Filter; import org.elasticsearch.xpack.esql.core.plan.logical.Limit; import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; @@ -36,6 +41,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -214,6 +220,48 @@ public void testMissingFieldInMvExpand() { as(limit2.child(), EsRelation.class); } + public static class MockFieldAttributeCommand extends UnaryPlan { + public FieldAttribute field; + + public MockFieldAttributeCommand(Source source, LogicalPlan child, FieldAttribute field) { + super(source, child); + this.field = field; + } + + @Override + public UnaryPlan replaceChild(LogicalPlan 
newChild) { + return new MockFieldAttributeCommand(source(), newChild, field); + } + + @Override + public boolean expressionsResolved() { + return true; + } + + @Override + public List output() { + return List.of(field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, MockFieldAttributeCommand::new, child(), field); + } + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/110150") + public void testMissingFieldInNewCommand() { + var testStats = statsForMissingField("last_name"); + localPlan( + new MockFieldAttributeCommand( + EMPTY, + new Row(EMPTY, List.of()), + new FieldAttribute(EMPTY, "last_name", new EsField("last_name", DataType.KEYWORD, Map.of(), true)) + ), + testStats + ); + } + /** * Expects * EsqlProject[[x{r}#3]] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/170_no_replicas.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/170_no_replicas.yml new file mode 100644 index 0000000000000..6ac5b2ca68d5c --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/170_no_replicas.yml @@ -0,0 +1,181 @@ +--- +setup: + - requires: + cluster_features: ["gte_v8.15.0"] + reason: "Planning bugs for locally missing fields fixed in v 8.15" + test_runner_features: allowed_warnings_regex + - do: + indices.create: + index: test1 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name1: + type: keyword + - do: + bulk: + index: "test1" + refresh: true + body: + - { "index": { } } + - { "name1": "1"} + - do: + indices.create: + index: test2 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name2: + type: keyword + - do: + bulk: + index: "test2" + refresh: true + body: + - { "index": { } } + - { "name2": "2"} + + - do: + indices.create: + index: test3 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name3: + type: keyword + - do: + bulk: + index: "test3" + refresh: true + body: + - { "index": { } } + - { "name3": "3"} + + - do: + indices.create: + index: test4 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name4: + type: keyword + - do: + bulk: + index: "test4" + refresh: true + body: + - { "index": { } } + - { "name4": "4"} + + - do: + indices.create: + index: test5 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name5: + type: keyword + - do: + bulk: + index: "test5" + refresh: true + body: + - { "index": { } } + - { "name5": "5"} + + - do: + indices.create: + index: test6 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name6: + type: keyword + - do: + bulk: + index: "test6" + refresh: true + body: + - { "index": { } } + - { "name6": "6"} + + - do: + indices.create: + index: test7 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name7: + type: keyword + - do: + bulk: + index: "test7" + refresh: true + body: + - { "index": { } } + - { "name7": "7"} + + - do: + indices.create: + index: test8 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + name8: + type: keyword + - do: + bulk: + index: "test8" + refresh: true + body: + - { "index": { } } + - { "name8": "8"} + +--- +"Test From 1": + - do: + esql.query: + body: + query: 'FROM test* | MV_EXPAND name1 | KEEP name1 | SORT name1 NULLS LAST | LIMIT 1' + + - match: 
{columns.0.name: "name1"} + - match: {columns.0.type: "keyword"} + - length: { values: 1 } + - match: {values.0.0: "1"} + +--- +"Test From 5": + - do: + esql.query: + body: + query: 'FROM test* | MV_EXPAND name5 | KEEP name5 | SORT name5 NULLS LAST | LIMIT 1' + + - match: {columns.0.name: "name5"} + - match: {columns.0.type: "keyword"} + - length: { values: 1 } + - match: {values.0.0: "5"} + From c71bfef99a877ca2a789e9dd389f2ab8430bbfb7 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 3 Jul 2024 15:24:46 +0200 Subject: [PATCH 158/216] Speedup index math for BigArray implementations (#110421) Same as https://github.com/elastic/elasticsearch/pull/110377, provides a little speedup to this at times very hot code. --- .../common/util/BigDoubleArray.java | 34 +++++++++++------ .../common/util/BigFloatArray.java | 30 ++++++++++----- .../common/util/BigIntArray.java | 38 ++++++++++++------- .../common/util/BigLongArray.java | 38 ++++++++++++------- 4 files changed, 90 insertions(+), 50 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java b/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java index 3135ebb293070..cfd44d82c757e 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java @@ -36,23 +36,23 @@ final class BigDoubleArray extends AbstractBigByteArray implements DoubleArray { @Override public double get(long index) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); return (double) VH_PLATFORM_NATIVE_DOUBLE.get(pages[pageIndex], indexInPage << 3); } @Override public void set(long index, double value) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); VH_PLATFORM_NATIVE_DOUBLE.set(page, indexInPage << 3, value); } @Override public double increment(long index, double inc) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); final double newVal = (double) VH_PLATFORM_NATIVE_DOUBLE.get(page, indexInPage << 3) + inc; VH_PLATFORM_NATIVE_DOUBLE.set(page, indexInPage << 3, newVal); @@ -69,16 +69,16 @@ public void fill(long fromIndex, long toIndex, double value) { if (fromIndex > toIndex) { throw new IllegalArgumentException(); } - final int fromPage = pageIndex(fromIndex); - final int toPage = pageIndex(toIndex - 1); + final int fromPage = pageIdx(fromIndex); + final int toPage = pageIdx(toIndex - 1); if (fromPage == toPage) { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), idxInPage(toIndex - 1) + 1, value); } else { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), pageSize(), value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), DOUBLE_PAGE_SIZE, value); for (int i = fromPage + 1; i < toPage; ++i) { - fill(getPageForWriting(i), 0, pageSize(), value); + fill(getPageForWriting(i), 0, DOUBLE_PAGE_SIZE, value); } - fill(getPageForWriting(toPage), 0, indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(toPage), 0, idxInPage(toIndex - 1) + 
1, value); } } @@ -108,4 +108,14 @@ public void set(long index, byte[] buf, int offset, int len) { public void writeTo(StreamOutput out) throws IOException { writePages(out, size, pages, Double.BYTES); } + + private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(DOUBLE_PAGE_SIZE); + + private static int pageIdx(long index) { + return (int) (index >>> PAGE_SHIFT); + } + + private static int idxInPage(long index) { + return (int) (index & DOUBLE_PAGE_SIZE - 1); + } } diff --git a/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java b/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java index 380b2c8e12b34..704a47d60473f 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java @@ -31,16 +31,16 @@ final class BigFloatArray extends AbstractBigByteArray implements FloatArray { @Override public void set(long index, float value) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); VH_PLATFORM_NATIVE_FLOAT.set(page, indexInPage << 2, value); } @Override public float get(long index) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); return (float) VH_PLATFORM_NATIVE_FLOAT.get(pages[pageIndex], indexInPage << 2); } @@ -54,16 +54,16 @@ public void fill(long fromIndex, long toIndex, float value) { if (fromIndex > toIndex) { throw new IllegalArgumentException(); } - final int fromPage = pageIndex(fromIndex); - final int toPage = pageIndex(toIndex - 1); + final int fromPage = pageIdx(fromIndex); + final int toPage = pageIdx(toIndex - 1); if (fromPage == toPage) { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), idxInPage(toIndex - 1) + 1, value); } else { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), pageSize(), value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), FLOAT_PAGE_SIZE, value); for (int i = fromPage + 1; i < toPage; ++i) { - fill(getPageForWriting(i), 0, pageSize(), value); + fill(getPageForWriting(i), 0, FLOAT_PAGE_SIZE, value); } - fill(getPageForWriting(toPage), 0, indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(toPage), 0, idxInPage(toIndex - 1) + 1, value); } } @@ -83,4 +83,14 @@ public static long estimateRamBytes(final long size) { public void set(long index, byte[] buf, int offset, int len) { set(index, buf, offset, len, 2); } + + private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(FLOAT_PAGE_SIZE); + + private static int pageIdx(long index) { + return (int) (index >>> PAGE_SHIFT); + } + + private static int idxInPage(long index) { + return (int) (index & FLOAT_PAGE_SIZE - 1); + } } diff --git a/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java b/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java index 9ce9842c337c0..5e9bccebdd0b5 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java @@ -40,15 +40,15 @@ public void writeTo(StreamOutput out) throws IOException { @Override public int get(long index) { - final int pageIndex = pageIndex(index); - final int 
indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); return (int) VH_PLATFORM_NATIVE_INT.get(pages[pageIndex], indexInPage << 2); } @Override public int getAndSet(long index, int value) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); final int ret = (int) VH_PLATFORM_NATIVE_INT.get(page, indexInPage << 2); VH_PLATFORM_NATIVE_INT.set(page, indexInPage << 2, value); @@ -57,15 +57,15 @@ public int getAndSet(long index, int value) { @Override public void set(long index, int value) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); VH_PLATFORM_NATIVE_INT.set(getPageForWriting(pageIndex), indexInPage << 2, value); } @Override public int increment(long index, int inc) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); final int newVal = (int) VH_PLATFORM_NATIVE_INT.get(page, indexInPage << 2) + inc; VH_PLATFORM_NATIVE_INT.set(page, indexInPage << 2, newVal); @@ -77,16 +77,16 @@ public void fill(long fromIndex, long toIndex, int value) { if (fromIndex > toIndex) { throw new IllegalArgumentException(); } - final int fromPage = pageIndex(fromIndex); - final int toPage = pageIndex(toIndex - 1); + final int fromPage = pageIdx(fromIndex); + final int toPage = pageIdx(toIndex - 1); if (fromPage == toPage) { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), idxInPage(toIndex - 1) + 1, value); } else { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), pageSize(), value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), INT_PAGE_SIZE, value); for (int i = fromPage + 1; i < toPage; ++i) { - fill(getPageForWriting(i), 0, pageSize(), value); + fill(getPageForWriting(i), 0, INT_PAGE_SIZE, value); } - fill(getPageForWriting(toPage), 0, indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(toPage), 0, idxInPage(toIndex - 1) + 1, value); } } @@ -116,4 +116,14 @@ public static long estimateRamBytes(final long size) { public void set(long index, byte[] buf, int offset, int len) { set(index, buf, offset, len, 2); } + + private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(INT_PAGE_SIZE); + + private static int pageIdx(long index) { + return (int) (index >>> PAGE_SHIFT); + } + + private static int idxInPage(long index) { + return (int) (index & INT_PAGE_SIZE - 1); + } } diff --git a/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java b/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java index 7d23e06f87658..aee57feca66f4 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java @@ -35,15 +35,15 @@ final class BigLongArray extends AbstractBigByteArray implements LongArray { @Override public long get(long index) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); return (long) 
VH_PLATFORM_NATIVE_LONG.get(pages[pageIndex], indexInPage << 3); } @Override public long getAndSet(long index, long value) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); final long ret = (long) VH_PLATFORM_NATIVE_LONG.get(page, indexInPage << 3); VH_PLATFORM_NATIVE_LONG.set(page, indexInPage << 3, value); @@ -52,16 +52,16 @@ public long getAndSet(long index, long value) { @Override public void set(long index, long value) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); VH_PLATFORM_NATIVE_LONG.set(page, indexInPage << 3, value); } @Override public long increment(long index, long inc) { - final int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); + final int pageIndex = pageIdx(index); + final int indexInPage = idxInPage(index); final byte[] page = getPageForWriting(pageIndex); final long newVal = (long) VH_PLATFORM_NATIVE_LONG.get(page, indexInPage << 3) + inc; VH_PLATFORM_NATIVE_LONG.set(page, indexInPage << 3, newVal); @@ -81,16 +81,16 @@ public void fill(long fromIndex, long toIndex, long value) { if (fromIndex == toIndex) { return; // empty range } - final int fromPage = pageIndex(fromIndex); - final int toPage = pageIndex(toIndex - 1); + final int fromPage = pageIdx(fromIndex); + final int toPage = pageIdx(toIndex - 1); if (fromPage == toPage) { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), idxInPage(toIndex - 1) + 1, value); } else { - fill(getPageForWriting(fromPage), indexInPage(fromIndex), pageSize(), value); + fill(getPageForWriting(fromPage), idxInPage(fromIndex), LONG_PAGE_SIZE, value); for (int i = fromPage + 1; i < toPage; ++i) { - fill(getPageForWriting(i), 0, pageSize(), value); + fill(getPageForWriting(i), 0, LONG_PAGE_SIZE, value); } - fill(getPageForWriting(toPage), 0, indexInPage(toIndex - 1) + 1, value); + fill(getPageForWriting(toPage), 0, idxInPage(toIndex - 1) + 1, value); } } @@ -130,4 +130,14 @@ static void writePages(StreamOutput out, long size, byte[][] pages, int bytesPer remainedBytes -= len; } } + + private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(LONG_PAGE_SIZE); + + private static int pageIdx(long index) { + return (int) (index >>> PAGE_SHIFT); + } + + private static int idxInPage(long index) { + return (int) (index & LONG_PAGE_SIZE - 1); + } } From d2925db8a064e0a33617e8307575069c795ccc31 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 3 Jul 2024 06:58:03 -0700 Subject: [PATCH 159/216] Enable double rounding when testing with more than 1 node (#110404) With this change, we will enable rounding for double values in the single-node QA module in serverless tests, while keeping it disabled in stateful tests.
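The motivation for rounding: with more than one node, the suite's three shards can combine double partial results in a different order on each run, and double addition is not associative, so exact-equality assertions can flake. A minimal, self-contained illustration (the literals are arbitrary):

    // Sketch: double addition is order-sensitive, which is why multi-node runs round before asserting.
    double left = (0.1 + 0.2) + 0.3;   // 0.6000000000000001
    double right = 0.1 + (0.2 + 0.3);  // 0.6
    System.out.println(left == right); // false: the two sums differ in the last ulp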
--- .../elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java index a3af3cbc8458b..6494695a484d4 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/EsqlSpecIT.java @@ -28,4 +28,10 @@ protected String getTestRestCluster() { public EsqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase, Mode mode) { super(fileName, groupName, testName, lineNumber, testCase, mode); } + + @Override + protected boolean enableRoundingDoubleValuesOnAsserting() { + // This suite runs with more than one node and three shards in serverless + return cluster.getNumNodes() > 1; + } } From db2c678d0bfea0636f6b3969e3d90f555e0afcc6 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 3 Jul 2024 09:59:23 -0400 Subject: [PATCH 160/216] ESQL: Merge function registries (#110402) This merges the `FunctionRegistry` from `esql-core` into the `EsqlFunctionRegistry` in `esql` proper in an effort to shave down the complexity we got by attempting to share lots of infrastructure with SQL and KQL. Since we no longer share we can compress these two together. --- .../expression/function/FunctionRegistry.java | 470 ------------------ .../function/FunctionRegistryTests.java | 209 -------- .../function/TestFunctionRegistry.java | 15 - .../xpack/esql/analysis/Analyzer.java | 14 +- .../xpack/esql/analysis/AnalyzerContext.java | 4 +- .../xpack/esql/execution/PlanExecutor.java | 3 +- .../function/EsqlFunctionRegistry.java | 442 +++++++++++++++- .../function/FunctionDefinition.java | 7 +- .../function/FunctionResolutionStrategy.java | 3 +- .../expression/function/OptionalArgument.java | 4 +- .../function/TwoOptionalArguments.java | 4 +- .../function/UnresolvedFunction.java | 5 +- .../function/aggregate/CountDistinct.java | 2 +- .../expression/function/aggregate/Rate.java | 2 +- .../expression/function/grouping/Bucket.java | 2 +- .../function/scalar/conditional/Greatest.java | 2 +- .../function/scalar/conditional/Least.java | 2 +- .../function/scalar/date/DateFormat.java | 2 +- .../function/scalar/date/DateParse.java | 2 +- .../function/scalar/ip/IpPrefix.java | 2 +- .../expression/function/scalar/math/Log.java | 2 +- .../function/scalar/math/Round.java | 2 +- .../function/scalar/multivalue/MvSlice.java | 2 +- .../function/scalar/multivalue/MvSort.java | 2 +- .../function/scalar/multivalue/MvZip.java | 2 +- .../function/scalar/nulls/Coalesce.java | 2 +- .../function/scalar/string/Locate.java | 2 +- .../function/scalar/string/Repeat.java | 2 +- .../function/scalar/string/Substring.java | 2 +- .../xpack/esql/parser/ExpressionBuilder.java | 4 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 2 +- .../esql/plan/logical/meta/MetaFunctions.java | 3 +- .../xpack/esql/planner/Mapper.java | 6 +- .../xpack/esql/session/EsqlSession.java | 6 +- .../elasticsearch/xpack/esql/CsvTests.java | 3 +- .../xpack/esql/analysis/ParsingTests.java | 2 +- .../function/AbstractFunctionTestCase.java | 1 - .../function/EsqlFunctionRegistryTests.java | 131 ++++- .../expression/function/RailRoadDiagram.java | 1 - 
.../function/UnresolvedFunctionTests.java | 2 +- .../optimizer/PhysicalPlanOptimizerTests.java | 5 +- .../xpack/esql/parser/ExpressionTests.java | 4 +- .../esql/parser/StatementParserTests.java | 4 +- .../esql/plugin/DataNodeRequestTests.java | 3 +- .../esql/tree/EsqlNodeSubclassTests.java | 2 +- 45 files changed, 608 insertions(+), 785 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistry.java delete mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java delete mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/TestFunctionRegistry.java rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/expression/function/FunctionDefinition.java (87%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/expression/function/FunctionResolutionStrategy.java (91%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/expression/function/OptionalArgument.java (71%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/expression/function/TwoOptionalArguments.java (71%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/expression/function/UnresolvedFunction.java (97%) rename x-pack/plugin/{esql-core/src/test/java/org/elasticsearch/xpack/esql/core => esql/src/test/java/org/elasticsearch/xpack/esql}/expression/function/UnresolvedFunctionTests.java (99%) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistry.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistry.java deleted file mode 100644 index d3210ad6c2e6a..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistry.java +++ /dev/null @@ -1,470 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.expression.function; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.xpack.esql.core.ParsingException; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.session.Configuration; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.util.Check; - -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Map.Entry; -import java.util.function.BiFunction; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -import static java.util.Collections.emptyList; -import static java.util.Collections.unmodifiableList; -import static java.util.stream.Collectors.toList; - -public class FunctionRegistry { - - // Translation table for error messaging in the following function - private static final String[] NUM_NAMES = { "zero", "one", "two", "three", "four", "five", }; - - // list of functions grouped by type of functions (aggregate, statistics, math etc) and ordered alphabetically inside each group - // a single function will have one entry for itself with its name associated to its instance and, also, one entry for each alias - // it has with the alias name associated to the FunctionDefinition instance - private final Map defs = new LinkedHashMap<>(); - private final Map aliases = new HashMap<>(); - - public FunctionRegistry() {} - - /** - * Register the given function definitions with this registry. - */ - @SuppressWarnings("this-escape") - public FunctionRegistry(FunctionDefinition... functions) { - register(functions); - } - - @SuppressWarnings("this-escape") - public FunctionRegistry(FunctionDefinition[]... groupFunctions) { - register(groupFunctions); - } - - /** - * Returns a function registry that includes functions available exclusively in the snapshot build. - */ - public FunctionRegistry snapshotRegistry() { - return this; - } - - protected void register(FunctionDefinition[]... groupFunctions) { - for (FunctionDefinition[] group : groupFunctions) { - register(group); - } - } - - protected void register(FunctionDefinition... functions) { - // temporary map to hold [function_name/alias_name : function instance] - Map batchMap = new HashMap<>(); - for (FunctionDefinition f : functions) { - batchMap.put(f.name(), f); - for (String alias : f.aliases()) { - Object old = batchMap.put(alias, f); - if (old != null || defs.containsKey(alias)) { - throw new QlIllegalArgumentException( - "alias [" - + alias - + "] is used by " - + "[" - + (old != null ? 
old : defs.get(alias).name()) - + "] and [" - + f.name() - + "]" - ); - } - aliases.put(alias, f.name()); - } - } - // sort the temporary map by key name and add it to the global map of functions - defs.putAll( - batchMap.entrySet() - .stream() - .sorted(Map.Entry.comparingByKey()) - .collect( - Collectors.< - Entry, - String, - FunctionDefinition, - LinkedHashMap>toMap( - Map.Entry::getKey, - Map.Entry::getValue, - (oldValue, newValue) -> oldValue, - LinkedHashMap::new - ) - ) - ); - } - - public FunctionDefinition resolveFunction(String functionName) { - FunctionDefinition def = defs.get(functionName); - if (def == null) { - throw new QlIllegalArgumentException("Cannot find function {}; this should have been caught during analysis", functionName); - } - return def; - } - - private String normalize(String name) { - return name.toLowerCase(Locale.ROOT); - } - - public String resolveAlias(String alias) { - String normalized = normalize(alias); - return aliases.getOrDefault(normalized, normalized); - } - - public boolean functionExists(String functionName) { - return defs.containsKey(functionName); - } - - public Collection listFunctions() { - // It is worth double checking if we need this copy. These are immutable anyway. - return defs.values(); - } - - public Collection listFunctions(String pattern) { - // It is worth double checking if we need this copy. These are immutable anyway. - Pattern p = Strings.hasText(pattern) ? Pattern.compile(normalize(pattern)) : null; - return defs.entrySet() - .stream() - .filter(e -> p == null || p.matcher(e.getKey()).matches()) - .map(e -> cloneDefinition(e.getKey(), e.getValue())) - .collect(toList()); - } - - protected FunctionDefinition cloneDefinition(String name, FunctionDefinition definition) { - return new FunctionDefinition(name, emptyList(), definition.clazz(), definition.builder()); - } - - protected interface FunctionBuilder { - Function build(Source source, List children, Configuration cfg); - } - - /** - * Main method to register a function. - * - * @param names Must always have at least one entry which is the method's primary name - */ - @SuppressWarnings("overloads") - protected static FunctionDefinition def(Class function, FunctionBuilder builder, String... names) { - Check.isTrue(names.length > 0, "At least one name must be provided for the function"); - String primaryName = names[0]; - List aliases = Arrays.asList(names).subList(1, names.length); - FunctionDefinition.Builder realBuilder = (uf, cfg, extras) -> { - if (CollectionUtils.isEmpty(extras) == false) { - throw new ParsingException( - uf.source(), - "Unused parameters {} detected when building [{}]", - Arrays.toString(extras), - primaryName - ); - } - try { - return builder.build(uf.source(), uf.children(), cfg); - } catch (QlIllegalArgumentException e) { - throw new ParsingException(e, uf.source(), "error building [{}]: {}", primaryName, e.getMessage()); - } - }; - return new FunctionDefinition(primaryName, unmodifiableList(aliases), function, realBuilder); - } - - /** - * Build a {@linkplain FunctionDefinition} for a no-argument function. - */ - protected static FunctionDefinition def( - Class function, - java.util.function.Function ctorRef, - String... names - ) { - FunctionBuilder builder = (source, children, cfg) -> { - if (false == children.isEmpty()) { - throw new QlIllegalArgumentException("expects no arguments"); - } - return ctorRef.apply(source); - }; - return def(function, builder, names); - } - - /** - * Build a {@linkplain FunctionDefinition} for a unary function. 
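Reviewer note: the lookup half of the registry (resolveAlias, functionExists, resolveFunction) moves over verbatim, so the calling convention is unchanged. A minimal sketch of that flow; DummyFunction mirrors the removed test fixture, and testRegistryWith(...) is a hypothetical helper standing in for a subclass that exposes the protected register(...):

```java
// Hypothetical wiring: a registry subclass registers one no-arg function with
// a primary name and one alias (compare the removed FunctionRegistryTests).
EsqlFunctionRegistry registry = testRegistryWith(
    def(DummyFunction.class, DummyFunction::new, "dummy_function", "dummy_func")
);

// resolveAlias lower-cases the input and maps aliases back to the primary name,
// so the defs map only ever needs normalized primary-name keys.
String primary = registry.resolveAlias("DUMMY_FUNC");   // -> "dummy_function"

if (registry.functionExists(primary)) {
    FunctionDefinition definition = registry.resolveFunction(primary);
    // definition.builder() later turns an UnresolvedFunction into a Function
}
```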
- */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - public static FunctionDefinition def( - Class function, - BiFunction ctorRef, - String... names - ) { - FunctionBuilder builder = (source, children, cfg) -> { - if (children.size() != 1) { - throw new QlIllegalArgumentException("expects exactly one argument"); - } - return ctorRef.apply(source, children.get(0)); - }; - return def(function, builder, names); - } - - /** - * Build a {@linkplain FunctionDefinition} for multi-arg/n-ary function. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected FunctionDefinition def(Class function, NaryBuilder ctorRef, String... names) { - FunctionBuilder builder = (source, children, cfg) -> { return ctorRef.build(source, children); }; - return def(function, builder, names); - } - - protected interface NaryBuilder { - T build(Source source, List children); - } - - /** - * Build a {@linkplain FunctionDefinition} for a binary function. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected static FunctionDefinition def(Class function, BinaryBuilder ctorRef, String... names) { - FunctionBuilder builder = (source, children, cfg) -> { - boolean isBinaryOptionalParamFunction = OptionalArgument.class.isAssignableFrom(function); - if (isBinaryOptionalParamFunction && (children.size() > 2 || children.size() < 1)) { - throw new QlIllegalArgumentException("expects one or two arguments"); - } else if (isBinaryOptionalParamFunction == false && children.size() != 2) { - throw new QlIllegalArgumentException("expects exactly two arguments"); - } - - return ctorRef.build(source, children.get(0), children.size() == 2 ? children.get(1) : null); - }; - return def(function, builder, names); - } - - protected interface BinaryBuilder { - T build(Source source, Expression left, Expression right); - } - - /** - * Build a {@linkplain FunctionDefinition} for a ternary function. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected static FunctionDefinition def(Class function, TernaryBuilder ctorRef, String... names) { - FunctionBuilder builder = (source, children, cfg) -> { - boolean hasMinimumTwo = OptionalArgument.class.isAssignableFrom(function); - if (hasMinimumTwo && (children.size() > 3 || children.size() < 2)) { - throw new QlIllegalArgumentException("expects two or three arguments"); - } else if (hasMinimumTwo == false && children.size() != 3) { - throw new QlIllegalArgumentException("expects exactly three arguments"); - } - return ctorRef.build(source, children.get(0), children.get(1), children.size() == 3 ? children.get(2) : null); - }; - return def(function, builder, names); - } - - protected interface TernaryBuilder { - T build(Source source, Expression one, Expression two, Expression three); - } - - /** - * Build a {@linkplain FunctionDefinition} for a quaternary function. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected static FunctionDefinition def(Class function, QuaternaryBuilder ctorRef, String... 
names) { - FunctionBuilder builder = (source, children, cfg) -> { - if (OptionalArgument.class.isAssignableFrom(function)) { - if (children.size() > 4 || children.size() < 3) { - throw new QlIllegalArgumentException("expects three or four arguments"); - } - } else if (TwoOptionalArguments.class.isAssignableFrom(function)) { - if (children.size() > 4 || children.size() < 2) { - throw new QlIllegalArgumentException("expects minimum two, maximum four arguments"); - } - } else if (children.size() != 4) { - throw new QlIllegalArgumentException("expects exactly four arguments"); - } - return ctorRef.build( - source, - children.get(0), - children.get(1), - children.size() > 2 ? children.get(2) : null, - children.size() > 3 ? children.get(3) : null - ); - }; - return def(function, builder, names); - } - - protected interface QuaternaryBuilder { - T build(Source source, Expression one, Expression two, Expression three, Expression four); - } - - /** - * Build a {@linkplain FunctionDefinition} for a quinary function. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected static FunctionDefinition def( - Class function, - QuinaryBuilder ctorRef, - int numOptionalParams, - String... names - ) { - FunctionBuilder builder = (source, children, cfg) -> { - final int NUM_TOTAL_PARAMS = 5; - boolean hasOptionalParams = OptionalArgument.class.isAssignableFrom(function); - if (hasOptionalParams && (children.size() > NUM_TOTAL_PARAMS || children.size() < NUM_TOTAL_PARAMS - numOptionalParams)) { - throw new QlIllegalArgumentException( - "expects between " - + NUM_NAMES[NUM_TOTAL_PARAMS - numOptionalParams] - + " and " - + NUM_NAMES[NUM_TOTAL_PARAMS] - + " arguments" - ); - } else if (hasOptionalParams == false && children.size() != NUM_TOTAL_PARAMS) { - throw new QlIllegalArgumentException("expects exactly " + NUM_NAMES[NUM_TOTAL_PARAMS] + " arguments"); - } - return ctorRef.build( - source, - children.size() > 0 ? children.get(0) : null, - children.size() > 1 ? children.get(1) : null, - children.size() > 2 ? children.get(2) : null, - children.size() > 3 ? children.get(3) : null, - children.size() > 4 ? children.get(4) : null - ); - }; - return def(function, builder, names); - } - - protected interface QuinaryBuilder { - T build(Source source, Expression one, Expression two, Expression three, Expression four, Expression five); - } - - /** - * Build a {@linkplain FunctionDefinition} for functions with a mandatory argument followed by a varidic list. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected static FunctionDefinition def(Class function, UnaryVariadicBuilder ctorRef, String... names) { - FunctionBuilder builder = (source, children, cfg) -> { - boolean hasMinimumOne = OptionalArgument.class.isAssignableFrom(function); - if (hasMinimumOne && children.size() < 1) { - throw new QlIllegalArgumentException("expects at least one argument"); - } else if (hasMinimumOne == false && children.size() < 2) { - throw new QlIllegalArgumentException("expects at least two arguments"); - } - return ctorRef.build(source, children.get(0), children.subList(1, children.size())); - }; - return def(function, builder, names); - } - - protected interface UnaryVariadicBuilder { - T build(Source source, Expression exp, List variadic); - } - - /** - * Build a {@linkplain FunctionDefinition} for a no-argument function that is configuration aware. 
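Reviewer note: all of these def(...) overloads key their arity checks off marker interfaces on the function class, not off the builder's shape. A sketch of the binary case, assuming (as with ESQL's ROUND, whose second, decimals argument is optional) a class that implements OptionalArgument; the explicit lambda just makes the nullable slot visible:

```java
// Round implements OptionalArgument, so the binary overload accepts one or two
// children and hands the builder null for a missing second argument; without
// the marker it would demand exactly two.
FunctionDefinition round = def(
    Round.class,
    (Source source, Expression field, Expression decimals) -> new Round(source, field, decimals),
    "round"
);
```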
- */ - @SuppressWarnings("overloads") - protected static FunctionDefinition def(Class function, ConfigurationAwareBuilder ctorRef, String... names) { - FunctionBuilder builder = (source, children, cfg) -> { - if (false == children.isEmpty()) { - throw new QlIllegalArgumentException("expects no arguments"); - } - return ctorRef.build(source, cfg); - }; - return def(function, builder, names); - } - - protected interface ConfigurationAwareBuilder { - T build(Source source, Configuration configuration); - } - - /** - * Build a {@linkplain FunctionDefinition} for a one-argument function that is configuration aware. - */ - @SuppressWarnings("overloads") - public static FunctionDefinition def( - Class function, - UnaryConfigurationAwareBuilder ctorRef, - String... names - ) { - FunctionBuilder builder = (source, children, cfg) -> { - if (children.size() > 1) { - throw new QlIllegalArgumentException("expects exactly one argument"); - } - Expression ex = children.size() == 1 ? children.get(0) : null; - return ctorRef.build(source, ex, cfg); - }; - return def(function, builder, names); - } - - public interface UnaryConfigurationAwareBuilder { - T build(Source source, Expression exp, Configuration configuration); - } - - /** - * Build a {@linkplain FunctionDefinition} for a binary function that is configuration aware. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected static FunctionDefinition def( - Class function, - BinaryConfigurationAwareBuilder ctorRef, - String... names - ) { - FunctionBuilder builder = (source, children, cfg) -> { - boolean isBinaryOptionalParamFunction = OptionalArgument.class.isAssignableFrom(function); - if (isBinaryOptionalParamFunction && (children.size() > 2 || children.size() < 1)) { - throw new QlIllegalArgumentException("expects one or two arguments"); - } else if (isBinaryOptionalParamFunction == false && children.size() != 2) { - throw new QlIllegalArgumentException("expects exactly two arguments"); - } - return ctorRef.build(source, children.get(0), children.size() == 2 ? children.get(1) : null, cfg); - }; - return def(function, builder, names); - } - - protected interface BinaryConfigurationAwareBuilder { - T build(Source source, Expression left, Expression right, Configuration configuration); - } - - /** - * Build a {@linkplain FunctionDefinition} for a ternary function that is configuration aware. - */ - @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do - protected FunctionDefinition def(Class function, TernaryConfigurationAwareBuilder ctorRef, String... names) { - FunctionBuilder builder = (source, children, cfg) -> { - boolean hasMinimumTwo = OptionalArgument.class.isAssignableFrom(function); - if (hasMinimumTwo && (children.size() > 3 || children.size() < 2)) { - throw new QlIllegalArgumentException("expects two or three arguments"); - } else if (hasMinimumTwo == false && children.size() != 3) { - throw new QlIllegalArgumentException("expects exactly three arguments"); - } - return ctorRef.build(source, children.get(0), children.get(1), children.size() == 3 ? children.get(2) : null, cfg); - }; - return def(function, builder, names); - } - - protected interface TernaryConfigurationAwareBuilder { - T build(Source source, Expression one, Expression two, Expression three, Configuration configuration); - } - - // - // Utility method for extra argument extraction. 
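Reviewer note: the configuration-aware variants exist so a definition can thread the per-request Configuration (time zone, locale, and so on) into the constructor alongside the parsed children. A sketch under the assumption that DATE_FORMAT keeps its (Source, format, date, Configuration) constructor, with the format argument optional:

```java
// DateFormat implements OptionalArgument, so the binary configuration-aware
// overload tolerates a single child: a missing format arrives as null, and cfg
// is the session Configuration supplied when the definition is built.
FunctionDefinition dateFormat = def(
    DateFormat.class,
    (Source source, Expression format, Expression date, Configuration cfg) ->
        new DateFormat(source, format, date, cfg),
    "date_format"
);
```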
- // - protected static Boolean asBool(Object[] extras) { - if (CollectionUtils.isEmpty(extras)) { - return null; - } - if (extras.length != 1 || (extras[0] instanceof Boolean) == false) { - throw new QlIllegalArgumentException("Invalid number and types of arguments given to function definition"); - } - return (Boolean) extras[0]; - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java deleted file mode 100644 index 8d39cc74779f2..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionRegistryTests.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.function; - -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.ParsingException; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.tree.SourceTests; -import org.elasticsearch.xpack.esql.core.type.DataType; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.esql.core.TestUtils.randomConfiguration; -import static org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry.def; -import static org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy.DEFAULT; -import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; - -public class FunctionRegistryTests extends ESTestCase { - - public void testNoArgFunction() { - UnresolvedFunction ur = uf(DEFAULT); - FunctionRegistry r = new FunctionRegistry(defineDummyNoArgFunction()); - FunctionDefinition def = r.resolveFunction(ur.name()); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - } - - public static FunctionDefinition defineDummyNoArgFunction() { - return def(DummyFunction.class, DummyFunction::new, "dummy_function"); - } - - public void testUnaryFunction() { - UnresolvedFunction ur = uf(DEFAULT, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(defineDummyUnaryFunction(ur)); - FunctionDefinition def = r.resolveFunction(ur.name()); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - // No children aren't supported - ParsingException e = expectThrows(ParsingException.class, () -> uf(DEFAULT).buildResolved(randomConfiguration(), def)); - assertThat(e.getMessage(), endsWith("expects exactly one argument")); - - // Multiple children aren't supported - e = expectThrows( - ParsingException.class, - () -> uf(DEFAULT, mock(Expression.class), mock(Expression.class)).buildResolved(randomConfiguration(), def) - ); - 
assertThat(e.getMessage(), endsWith("expects exactly one argument")); - } - - public static FunctionDefinition defineDummyUnaryFunction(UnresolvedFunction ur) { - return def(DummyFunction.class, (Source l, Expression e) -> { - assertSame(e, ur.children().get(0)); - return new DummyFunction(l); - }, "dummy_function"); - } - - public void testBinaryFunction() { - UnresolvedFunction ur = uf(DEFAULT, mock(Expression.class), mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Source l, Expression lhs, Expression rhs) -> { - assertSame(lhs, ur.children().get(0)); - assertSame(rhs, ur.children().get(1)); - return new DummyFunction(l); - }, "dummy_function")); - FunctionDefinition def = r.resolveFunction(ur.name()); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - // No children aren't supported - ParsingException e = expectThrows(ParsingException.class, () -> uf(DEFAULT).buildResolved(randomConfiguration(), def)); - assertThat(e.getMessage(), endsWith("expects exactly two arguments")); - - // One child isn't supported - e = expectThrows(ParsingException.class, () -> uf(DEFAULT, mock(Expression.class)).buildResolved(randomConfiguration(), def)); - assertThat(e.getMessage(), endsWith("expects exactly two arguments")); - - // Many children aren't supported - e = expectThrows( - ParsingException.class, - () -> uf(DEFAULT, mock(Expression.class), mock(Expression.class), mock(Expression.class)).buildResolved( - randomConfiguration(), - def - ) - ); - assertThat(e.getMessage(), endsWith("expects exactly two arguments")); - } - - public void testAliasNameIsTheSameAsAFunctionName() { - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, DummyFunction::new, "DUMMY_FUNCTION", "ALIAS")); - QlIllegalArgumentException iae = expectThrows( - QlIllegalArgumentException.class, - () -> r.register(def(DummyFunction2.class, DummyFunction2::new, "DUMMY_FUNCTION2", "DUMMY_FUNCTION")) - ); - assertEquals("alias [DUMMY_FUNCTION] is used by [DUMMY_FUNCTION] and [DUMMY_FUNCTION2]", iae.getMessage()); - } - - public void testDuplicateAliasInTwoDifferentFunctionsFromTheSameBatch() { - QlIllegalArgumentException iae = expectThrows( - QlIllegalArgumentException.class, - () -> new FunctionRegistry( - def(DummyFunction.class, DummyFunction::new, "DUMMY_FUNCTION", "ALIAS"), - def(DummyFunction2.class, DummyFunction2::new, "DUMMY_FUNCTION2", "ALIAS") - ) - ); - assertEquals("alias [ALIAS] is used by [DUMMY_FUNCTION(ALIAS)] and [DUMMY_FUNCTION2]", iae.getMessage()); - } - - public void testDuplicateAliasInTwoDifferentFunctionsFromTwoDifferentBatches() { - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, DummyFunction::new, "DUMMY_FUNCTION", "ALIAS")); - QlIllegalArgumentException iae = expectThrows( - QlIllegalArgumentException.class, - () -> r.register(def(DummyFunction2.class, DummyFunction2::new, "DUMMY_FUNCTION2", "ALIAS")) - ); - assertEquals("alias [ALIAS] is used by [DUMMY_FUNCTION] and [DUMMY_FUNCTION2]", iae.getMessage()); - } - - public void testFunctionResolving() { - UnresolvedFunction ur = uf(DEFAULT, mock(Expression.class)); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Source l, Expression e) -> { - assertSame(e, ur.children().get(0)); - return new DummyFunction(l); - }, "dummy_function", "dummy_func")); - - // Resolve by primary name - FunctionDefinition def = r.resolveFunction(r.resolveAlias("DuMMy_FuncTIon")); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), 
def).source()); - - def = r.resolveFunction(r.resolveAlias("Dummy_Function")); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - def = r.resolveFunction(r.resolveAlias("dummy_function")); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - def = r.resolveFunction(r.resolveAlias("DUMMY_FUNCTION")); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - // Resolve by alias - def = r.resolveFunction(r.resolveAlias("DumMy_FunC")); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - def = r.resolveFunction(r.resolveAlias("dummy_func")); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - def = r.resolveFunction(r.resolveAlias("DUMMY_FUNC")); - assertEquals(ur.source(), ur.buildResolved(randomConfiguration(), def).source()); - - // Not resolved - QlIllegalArgumentException e = expectThrows( - QlIllegalArgumentException.class, - () -> r.resolveFunction(r.resolveAlias("DummyFunction")) - ); - assertThat(e.getMessage(), is("Cannot find function dummyfunction; this should have been caught during analysis")); - - e = expectThrows(QlIllegalArgumentException.class, () -> r.resolveFunction(r.resolveAlias("dummyFunction"))); - assertThat(e.getMessage(), is("Cannot find function dummyfunction; this should have been caught during analysis")); - } - - public static UnresolvedFunction uf(FunctionResolutionStrategy resolutionStrategy, Expression... children) { - return new UnresolvedFunction(SourceTests.randomSource(), "dummy_function", resolutionStrategy, Arrays.asList(children)); - } - - public static class DummyFunction extends ScalarFunction { - public DummyFunction(Source source) { - super(source, emptyList()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - public String getWriteableName() { - throw new UnsupportedOperationException(); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this); - } - - @Override - public Expression replaceChildren(List newChildren) { - throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); - } - - @Override - public DataType dataType() { - return null; - } - } - - public static class DummyFunction2 extends DummyFunction { - public DummyFunction2(Source source) { - super(source); - } - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/TestFunctionRegistry.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/TestFunctionRegistry.java deleted file mode 100644 index 3d17a6ea79624..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/TestFunctionRegistry.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.expression.function; - -public class TestFunctionRegistry extends FunctionRegistry { - - public TestFunctionRegistry(FunctionDefinition... 
definitions) { - super(definitions); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 0d556efbea5db..4fcd37faa311a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -33,9 +33,6 @@ import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; -import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition; -import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; -import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; @@ -58,6 +55,8 @@ import org.elasticsearch.xpack.esql.expression.NamedExpressions; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; +import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; @@ -867,7 +866,7 @@ private static class ResolveFunctions extends ParameterizedAnalyzerRule resolveFunction(uf, context.configuration(), snapshotRegistry) @@ -877,7 +876,7 @@ protected LogicalPlan rule(LogicalPlan plan, AnalyzerContext context) { public static org.elasticsearch.xpack.esql.core.expression.function.Function resolveFunction( UnresolvedFunction uf, Configuration configuration, - FunctionRegistry functionRegistry + EsqlFunctionRegistry functionRegistry ) { org.elasticsearch.xpack.esql.core.expression.function.Function f = null; if (uf.analyzed()) { @@ -926,10 +925,7 @@ private BitSet gatherPreAnalysisMetrics(LogicalPlan plan, BitSet b) { private static class ImplicitCasting extends ParameterizedRule { @Override public LogicalPlan apply(LogicalPlan plan, AnalyzerContext context) { - return plan.transformExpressionsUp( - ScalarFunction.class, - e -> ImplicitCasting.cast(e, (EsqlFunctionRegistry) context.functionRegistry()) - ); + return plan.transformExpressionsUp(ScalarFunction.class, e -> ImplicitCasting.cast(e, context.functionRegistry())); } private static Expression cast(ScalarFunction f, EsqlFunctionRegistry registry) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerContext.java index c488aa2261d51..5585a3f117d2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerContext.java @@ -7,13 +7,13 @@ package org.elasticsearch.xpack.esql.analysis; -import 
org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; import org.elasticsearch.xpack.esql.core.index.IndexResolution; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; public record AnalyzerContext( EsqlConfiguration configuration, - FunctionRegistry functionRegistry, + EsqlFunctionRegistry functionRegistry, IndexResolution indexResolution, EnrichResolution enrichResolution ) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index f4979fa9928db..df67f4609c33e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.analysis.PreAnalyzer; import org.elasticsearch.xpack.esql.analysis.Verifier; -import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; @@ -30,7 +29,7 @@ public class PlanExecutor { private final IndexResolver indexResolver; private final PreAnalyzer preAnalyzer; - private final FunctionRegistry functionRegistry; + private final EsqlFunctionRegistry functionRegistry; private final Mapper mapper; private final Metrics metrics; private final Verifier verifier; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index d65dc1d6b397f..9a4236cbd96fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -8,11 +8,16 @@ package org.elasticsearch.xpack.esql.expression.function; import org.elasticsearch.Build; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.xpack.esql.core.ParsingException; +import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition; -import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry; import org.elasticsearch.xpack.esql.core.session.Configuration; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; @@ -122,13 +127,19 @@ import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import 
java.util.Locale; import java.util.Map; +import java.util.function.BiFunction; +import java.util.regex.Pattern; import java.util.stream.Collectors; +import static java.util.Collections.emptyList; +import static java.util.Collections.unmodifiableList; +import static java.util.stream.Collectors.toList; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE; @@ -145,7 +156,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; -public final class EsqlFunctionRegistry extends FunctionRegistry { +public class EsqlFunctionRegistry { private static final Map, List> dataTypesForStringLiteralConversion = new LinkedHashMap<>(); @@ -173,6 +184,15 @@ public final class EsqlFunctionRegistry extends FunctionRegistry { } } + // Translation table for error messaging in the following function + private static final String[] NUM_NAMES = { "zero", "one", "two", "three", "four", "five", }; + + // list of functions grouped by type of functions (aggregate, statistics, math etc) and ordered alphabetically inside each group + // a single function will have one entry for itself with its name associated to its instance and, also, one entry for each alias + // it has with the alias name associated to the FunctionDefinition instance + private final Map defs = new LinkedHashMap<>(); + private final Map aliases = new HashMap<>(); + private SnapshotFunctionRegistry snapshotRegistry = null; public EsqlFunctionRegistry() { @@ -184,6 +204,42 @@ public EsqlFunctionRegistry() { register(functions); } + public FunctionDefinition resolveFunction(String functionName) { + FunctionDefinition def = defs.get(functionName); + if (def == null) { + throw new QlIllegalArgumentException("Cannot find function {}; this should have been caught during analysis", functionName); + } + return def; + } + + private String normalize(String name) { + return name.toLowerCase(Locale.ROOT); + } + + public String resolveAlias(String alias) { + String normalized = normalize(alias); + return aliases.getOrDefault(normalized, normalized); + } + + public boolean functionExists(String functionName) { + return defs.containsKey(functionName); + } + + public Collection listFunctions() { + // It is worth double checking if we need this copy. These are immutable anyway. + return defs.values(); + } + + public Collection listFunctions(String pattern) { + // It is worth double checking if we need this copy. These are immutable anyway. + Pattern p = Strings.hasText(pattern) ? 
Pattern.compile(normalize(pattern)) : null; + return defs.entrySet() + .stream() + .filter(e -> p == null || p.matcher(e.getKey()).matches()) + .map(e -> cloneDefinition(e.getKey(), e.getValue())) + .collect(toList()); + } + private FunctionDefinition[][] functions() { return new FunctionDefinition[][] { // grouping functions @@ -313,14 +369,13 @@ private static FunctionDefinition[][] snapshotFunctions() { return new FunctionDefinition[][] { new FunctionDefinition[] { def(Rate.class, Rate::withUnresolvedTimestamp, "rate") } }; } - @Override - public FunctionRegistry snapshotRegistry() { + public EsqlFunctionRegistry snapshotRegistry() { if (Build.current().isSnapshot() == false) { return this; } var snapshotRegistry = this.snapshotRegistry; if (snapshotRegistry == null) { - snapshotRegistry = new SnapshotFunctionRegistry(functions(), snapshotFunctions()); + snapshotRegistry = new SnapshotFunctionRegistry(); this.snapshotRegistry = snapshotRegistry; } return snapshotRegistry; @@ -464,13 +519,380 @@ public List getDataTypeForStringLiteralConversion(Class batchMap = new HashMap<>(); + for (FunctionDefinition f : functions) { + batchMap.put(f.name(), f); + for (String alias : f.aliases()) { + Object old = batchMap.put(alias, f); + if (old != null || defs.containsKey(alias)) { + throw new QlIllegalArgumentException( + "alias [" + + alias + + "] is used by " + + "[" + + (old != null ? old : defs.get(alias).name()) + + "] and [" + + f.name() + + "]" + ); + } + aliases.put(alias, f.name()); + } + } + // sort the temporary map by key name and add it to the global map of functions + defs.putAll( + batchMap.entrySet() + .stream() + .sorted(Map.Entry.comparingByKey()) + .collect( + Collectors.< + Map.Entry, + String, + FunctionDefinition, + LinkedHashMap>toMap( + Map.Entry::getKey, + Map.Entry::getValue, + (oldValue, newValue) -> oldValue, + LinkedHashMap::new + ) + ) + ); + } + + protected FunctionDefinition cloneDefinition(String name, FunctionDefinition definition) { + return new FunctionDefinition(name, emptyList(), definition.clazz(), definition.builder()); + } + + protected interface FunctionBuilder { + Function build(Source source, List children, Configuration cfg); + } + + /** + * Main method to register a function. + * + * @param names Must always have at least one entry which is the method's primary name + */ + @SuppressWarnings("overloads") + protected static FunctionDefinition def(Class function, FunctionBuilder builder, String... names) { + Check.isTrue(names.length > 0, "At least one name must be provided for the function"); + String primaryName = names[0]; + List aliases = Arrays.asList(names).subList(1, names.length); + FunctionDefinition.Builder realBuilder = (uf, cfg, extras) -> { + if (CollectionUtils.isEmpty(extras) == false) { + throw new ParsingException( + uf.source(), + "Unused parameters {} detected when building [{}]", + Arrays.toString(extras), + primaryName + ); + } + try { + return builder.build(uf.source(), uf.children(), cfg); + } catch (QlIllegalArgumentException e) { + throw new ParsingException(e, uf.source(), "error building [{}]: {}", primaryName, e.getMessage()); + } + }; + return new FunctionDefinition(primaryName, unmodifiableList(aliases), function, realBuilder); + } + + /** + * Build a {@linkplain FunctionDefinition} for a no-argument function. + */ + public static FunctionDefinition def( + Class function, + java.util.function.Function ctorRef, + String... 
names + ) { + FunctionBuilder builder = (source, children, cfg) -> { + if (false == children.isEmpty()) { + throw new QlIllegalArgumentException("expects no arguments"); + } + return ctorRef.apply(source); + }; + return def(function, builder, names); + } + + /** + * Build a {@linkplain FunctionDefinition} for a unary function. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + public static FunctionDefinition def( + Class function, + BiFunction ctorRef, + String... names + ) { + FunctionBuilder builder = (source, children, cfg) -> { + if (children.size() != 1) { + throw new QlIllegalArgumentException("expects exactly one argument"); + } + return ctorRef.apply(source, children.get(0)); + }; + return def(function, builder, names); + } + + /** + * Build a {@linkplain FunctionDefinition} for multi-arg/n-ary function. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + protected FunctionDefinition def(Class function, NaryBuilder ctorRef, String... names) { + FunctionBuilder builder = (source, children, cfg) -> { return ctorRef.build(source, children); }; + return def(function, builder, names); + } + + protected interface NaryBuilder { + T build(Source source, List children); + } + + /** + * Build a {@linkplain FunctionDefinition} for a binary function. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + public static FunctionDefinition def(Class function, BinaryBuilder ctorRef, String... names) { + FunctionBuilder builder = (source, children, cfg) -> { + boolean isBinaryOptionalParamFunction = OptionalArgument.class.isAssignableFrom(function); + if (isBinaryOptionalParamFunction && (children.size() > 2 || children.size() < 1)) { + throw new QlIllegalArgumentException("expects one or two arguments"); + } else if (isBinaryOptionalParamFunction == false && children.size() != 2) { + throw new QlIllegalArgumentException("expects exactly two arguments"); + } + + return ctorRef.build(source, children.get(0), children.size() == 2 ? children.get(1) : null); + }; + return def(function, builder, names); + } + + public interface BinaryBuilder { + T build(Source source, Expression left, Expression right); + } + + /** + * Build a {@linkplain FunctionDefinition} for a ternary function. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + protected static FunctionDefinition def(Class function, TernaryBuilder ctorRef, String... names) { + FunctionBuilder builder = (source, children, cfg) -> { + boolean hasMinimumTwo = OptionalArgument.class.isAssignableFrom(function); + if (hasMinimumTwo && (children.size() > 3 || children.size() < 2)) { + throw new QlIllegalArgumentException("expects two or three arguments"); + } else if (hasMinimumTwo == false && children.size() != 3) { + throw new QlIllegalArgumentException("expects exactly three arguments"); + } + return ctorRef.build(source, children.get(0), children.get(1), children.size() == 3 ? children.get(2) : null); + }; + return def(function, builder, names); + } + + protected interface TernaryBuilder { + T build(Source source, Expression one, Expression two, Expression three); + } + + /** + * Build a {@linkplain FunctionDefinition} for a quaternary function. 
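Reviewer note: the quaternary overload just below is the one spot where the two marker interfaces diverge: OptionalArgument narrows it to three or four children, while TwoOptionalArguments widens it to two through four. BUCKET is the motivating case; the constructor shape here is an assumption for illustration:

```java
// Bucket implements TwoOptionalArguments (its import flips to the new package
// later in this diff), so BUCKET(field, buckets[, from, to]) resolves with the
// trailing children passed as null when absent.
FunctionDefinition bucket = def(
    Bucket.class,
    (Source source, Expression field, Expression buckets, Expression from, Expression to) ->
        new Bucket(source, field, buckets, from, to),
    "bucket"
);
```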
+ */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + protected static FunctionDefinition def(Class function, QuaternaryBuilder ctorRef, String... names) { + FunctionBuilder builder = (source, children, cfg) -> { + if (OptionalArgument.class.isAssignableFrom(function)) { + if (children.size() > 4 || children.size() < 3) { + throw new QlIllegalArgumentException("expects three or four arguments"); + } + } else if (TwoOptionalArguments.class.isAssignableFrom(function)) { + if (children.size() > 4 || children.size() < 2) { + throw new QlIllegalArgumentException("expects minimum two, maximum four arguments"); + } + } else if (children.size() != 4) { + throw new QlIllegalArgumentException("expects exactly four arguments"); + } + return ctorRef.build( + source, + children.get(0), + children.get(1), + children.size() > 2 ? children.get(2) : null, + children.size() > 3 ? children.get(3) : null + ); + }; + return def(function, builder, names); + } + + protected interface QuaternaryBuilder { + T build(Source source, Expression one, Expression two, Expression three, Expression four); + } + + /** + * Build a {@linkplain FunctionDefinition} for a quinary function. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + protected static FunctionDefinition def( + Class function, + QuinaryBuilder ctorRef, + int numOptionalParams, + String... names + ) { + FunctionBuilder builder = (source, children, cfg) -> { + final int NUM_TOTAL_PARAMS = 5; + boolean hasOptionalParams = OptionalArgument.class.isAssignableFrom(function); + if (hasOptionalParams && (children.size() > NUM_TOTAL_PARAMS || children.size() < NUM_TOTAL_PARAMS - numOptionalParams)) { + throw new QlIllegalArgumentException( + "expects between " + + NUM_NAMES[NUM_TOTAL_PARAMS - numOptionalParams] + + " and " + + NUM_NAMES[NUM_TOTAL_PARAMS] + + " arguments" + ); + } else if (hasOptionalParams == false && children.size() != NUM_TOTAL_PARAMS) { + throw new QlIllegalArgumentException("expects exactly " + NUM_NAMES[NUM_TOTAL_PARAMS] + " arguments"); + } + return ctorRef.build( + source, + children.size() > 0 ? children.get(0) : null, + children.size() > 1 ? children.get(1) : null, + children.size() > 2 ? children.get(2) : null, + children.size() > 3 ? children.get(3) : null, + children.size() > 4 ? children.get(4) : null + ); + }; + return def(function, builder, names); + } + + protected interface QuinaryBuilder { + T build(Source source, Expression one, Expression two, Expression three, Expression four, Expression five); + } + + /** + * Build a {@linkplain FunctionDefinition} for functions with a mandatory argument followed by a varidic list. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + protected static FunctionDefinition def(Class function, UnaryVariadicBuilder ctorRef, String... 
names) { + FunctionBuilder builder = (source, children, cfg) -> { + boolean hasMinimumOne = OptionalArgument.class.isAssignableFrom(function); + if (hasMinimumOne && children.size() < 1) { + throw new QlIllegalArgumentException("expects at least one argument"); + } else if (hasMinimumOne == false && children.size() < 2) { + throw new QlIllegalArgumentException("expects at least two arguments"); + } + return ctorRef.build(source, children.get(0), children.subList(1, children.size())); + }; + return def(function, builder, names); + } + + protected interface UnaryVariadicBuilder { + T build(Source source, Expression exp, List variadic); + } + + /** + * Build a {@linkplain FunctionDefinition} for a no-argument function that is configuration aware. + */ + @SuppressWarnings("overloads") + protected static FunctionDefinition def(Class function, ConfigurationAwareBuilder ctorRef, String... names) { + FunctionBuilder builder = (source, children, cfg) -> { + if (false == children.isEmpty()) { + throw new QlIllegalArgumentException("expects no arguments"); + } + return ctorRef.build(source, cfg); + }; + return def(function, builder, names); + } + + protected interface ConfigurationAwareBuilder { + T build(Source source, Configuration configuration); + } + + /** + * Build a {@linkplain FunctionDefinition} for a one-argument function that is configuration aware. + */ + @SuppressWarnings("overloads") + public static FunctionDefinition def( + Class function, + UnaryConfigurationAwareBuilder ctorRef, + String... names + ) { + FunctionBuilder builder = (source, children, cfg) -> { + if (children.size() > 1) { + throw new QlIllegalArgumentException("expects exactly one argument"); + } + Expression ex = children.size() == 1 ? children.get(0) : null; + return ctorRef.build(source, ex, cfg); + }; + return def(function, builder, names); + } + + public interface UnaryConfigurationAwareBuilder { + T build(Source source, Expression exp, Configuration configuration); + } + + /** + * Build a {@linkplain FunctionDefinition} for a binary function that is configuration aware. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + protected static FunctionDefinition def( + Class function, + BinaryConfigurationAwareBuilder ctorRef, + String... names + ) { + FunctionBuilder builder = (source, children, cfg) -> { + boolean isBinaryOptionalParamFunction = OptionalArgument.class.isAssignableFrom(function); + if (isBinaryOptionalParamFunction && (children.size() > 2 || children.size() < 1)) { + throw new QlIllegalArgumentException("expects one or two arguments"); + } else if (isBinaryOptionalParamFunction == false && children.size() != 2) { + throw new QlIllegalArgumentException("expects exactly two arguments"); + } + return ctorRef.build(source, children.get(0), children.size() == 2 ? children.get(1) : null, cfg); + }; + return def(function, builder, names); + } + + protected interface BinaryConfigurationAwareBuilder { + T build(Source source, Expression left, Expression right, Configuration configuration); + } + + /** + * Build a {@linkplain FunctionDefinition} for a ternary function that is configuration aware. + */ + @SuppressWarnings("overloads") // These are ambiguous if you aren't using ctor references but we always do + protected FunctionDefinition def(Class function, TernaryConfigurationAwareBuilder ctorRef, String... 
names) { + FunctionBuilder builder = (source, children, cfg) -> { + boolean hasMinimumTwo = OptionalArgument.class.isAssignableFrom(function); + if (hasMinimumTwo && (children.size() > 3 || children.size() < 2)) { + throw new QlIllegalArgumentException("expects two or three arguments"); + } else if (hasMinimumTwo == false && children.size() != 3) { + throw new QlIllegalArgumentException("expects exactly three arguments"); + } + return ctorRef.build(source, children.get(0), children.get(1), children.size() == 3 ? children.get(2) : null, cfg); + }; + return def(function, builder, names); + } + + protected interface TernaryConfigurationAwareBuilder { + T build(Source source, Expression one, Expression two, Expression three, Configuration configuration); + } + + // + // Utility method for extra argument extraction. + // + protected static Boolean asBool(Object[] extras) { + if (CollectionUtils.isEmpty(extras)) { + return null; + } + if (extras.length != 1 || (extras[0] instanceof Boolean) == false) { + throw new QlIllegalArgumentException("Invalid number and types of arguments given to function definition"); } + return (Boolean) extras[0]; } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionDefinition.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDefinition.java similarity index 87% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionDefinition.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDefinition.java index 09f68c5c9b4a3..d93fc077dece4 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionDefinition.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionDefinition.java @@ -4,8 +4,9 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.esql.core.expression.function; +package org.elasticsearch.xpack.esql.expression.function; +import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.session.Configuration; import java.util.List; @@ -14,7 +15,7 @@ public class FunctionDefinition { /** - * Converts an {@link UnresolvedFunction} into the a proper {@link Function}. + * Converts an {@link UnresolvedFunction} into a proper {@link Function}. *
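Reviewer note: with the registry, FunctionDefinition, and UnresolvedFunction now colocated in the esql plugin, the resolution path the analyzer runs is fully visible in this one patch. A sketch of that path, using only calls that appear in this diff (uf is the parser's UnresolvedFunction, configuration the query session's):

```java
// Two hops: normalize the spelled name, fetch the definition, then let the
// definition's Builder instantiate the concrete Function; buildResolved(...)
// hands the UnresolvedFunction over to def.builder().
String name = registry.resolveAlias(uf.name());
FunctionDefinition def = registry.resolveFunction(name);   // throws if unknown
Function resolved = uf.buildResolved(configuration, def);
```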
    * Provides the basic signature (unresolved function + runtime configuration object) while * allowing extensions through the vararg extras which subclasses should expand for their @@ -49,7 +50,7 @@ public Class clazz() { return clazz; } - protected Builder builder() { + public Builder builder() { return builder; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionResolutionStrategy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionResolutionStrategy.java similarity index 91% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionResolutionStrategy.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionResolutionStrategy.java index a23112267dcf4..4e7f47db0b252 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/FunctionResolutionStrategy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionResolutionStrategy.java @@ -5,8 +5,9 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.core.expression.function; +package org.elasticsearch.xpack.esql.expression.function; +import org.elasticsearch.xpack.esql.core.expression.function.Function; import org.elasticsearch.xpack.esql.core.session.Configuration; /** diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/OptionalArgument.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/OptionalArgument.java similarity index 71% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/OptionalArgument.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/OptionalArgument.java index 90d1d06337330..ba80395281203 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/OptionalArgument.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/OptionalArgument.java @@ -5,11 +5,11 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.core.expression.function; +package org.elasticsearch.xpack.esql.expression.function; /** * Marker interface indicating that a function accepts one optional argument (typically the last one). - * This is used by the {@link FunctionRegistry} to perform validation of function declaration. + * This is used by the {@link EsqlFunctionRegistry} to perform validation of function declaration. */ public interface OptionalArgument { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/TwoOptionalArguments.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/TwoOptionalArguments.java similarity index 71% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/TwoOptionalArguments.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/TwoOptionalArguments.java index 78684f034f448..38bb23285e491 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/TwoOptionalArguments.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/TwoOptionalArguments.java @@ -5,11 +5,11 @@ * 2.0. 
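Reviewer note: OptionalArgument and TwoOptionalArguments remain pure marker interfaces after the move; the javadoc updates here only repoint them at EsqlFunctionRegistry. The check they feed is plain reflection, sketched with a hypothetical fnClass:

```java
// No methods to implement: arity handling keys off marker presence alone.
boolean oneOptionalArg  = OptionalArgument.class.isAssignableFrom(fnClass);
boolean twoOptionalArgs = TwoOptionalArguments.class.isAssignableFrom(fnClass);
```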
 */
-package org.elasticsearch.xpack.esql.core.expression.function;
+package org.elasticsearch.xpack.esql.expression.function;
 
 /**
  * Marker interface indicating that a function accepts two optional arguments (the last two).
- * This is used by the {@link FunctionRegistry} to perform validation of function declaration.
+ * This is used by the {@link EsqlFunctionRegistry} to perform validation of function declaration.
  */
 public interface TwoOptionalArguments {
 
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnresolvedFunction.java
similarity index 97%
rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunction.java
rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnresolvedFunction.java
index 49791e5820e7a..ab3475635ddbd 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/UnresolvedFunction.java
@@ -4,13 +4,14 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.esql.core.expression.function;
+package org.elasticsearch.xpack.esql.expression.function;
 
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable;
 import org.elasticsearch.xpack.esql.core.capabilities.UnresolvedException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Nullability;
+import org.elasticsearch.xpack.esql.core.expression.function.Function;
 import org.elasticsearch.xpack.esql.core.session.Configuration;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -56,7 +57,7 @@ public String getWriteableName() {
      *
      * @see #withMessage(String)
      */
-    UnresolvedFunction(
+    public UnresolvedFunction(
         Source source,
         String name,
         FunctionResolutionStrategy resolutionStrategy,
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java
index b2c3ae41686a9..f52c162ae5d7b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java
@@ -19,13 +19,13 @@
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions;
 import org.elasticsearch.xpack.esql.expression.SurrogateExpression;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong;
 import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java
index 227bea0789366..620a3759d9b19 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java
@@ -18,11 +18,11 @@
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.planner.ToAggregator;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java
index 7e6f3999bf11e..b8b084066af34 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java
@@ -21,13 +21,13 @@
 import org.elasticsearch.xpack.esql.core.expression.Foldables;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
-import org.elasticsearch.xpack.esql.core.expression.function.TwoOptionalArguments;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
 import org.elasticsearch.xpack.esql.expression.function.Param;
+import org.elasticsearch.xpack.esql.expression.function.TwoOptionalArguments;
 import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc;
 import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java
index d6fe76b119cb5..7c0427a95d478 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java
@@ -17,12 +17,12 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java
index 221a7d466da71..272e65106e7de 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java
@@ -17,12 +17,12 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java
index 8662116fe5b67..84a1a6e77ea73 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java
@@ -16,13 +16,13 @@
 import org.elasticsearch.compute.ann.Fixed;
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.session.Configuration;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java
index 10551cae9eba2..eb710e72882b1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java
@@ -17,12 +17,12 @@
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java
index ba51e5a9c4c0d..60b464b26750a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/IpPrefix.java
@@ -16,12 +16,12 @@
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java
index 348bbaf1fe85c..da11d1e77885b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java
@@ -13,12 +13,12 @@
 import org.elasticsearch.compute.ann.Evaluator;
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java
index 07953a478e2f0..7223615294446 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java
@@ -15,13 +15,13 @@
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.expression.predicate.operator.math.Maths;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java
index 2b3afe093fa96..3728f4305d5c7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java
@@ -22,13 +22,13 @@
 import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java
index aa41c58cef894..444c0e319fc6a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java
@@ -32,12 +32,12 @@
 import org.elasticsearch.xpack.esql.core.common.Failures;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java
index b53ead40d1e57..fd3b9e7664dff 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java
@@ -18,13 +18,13 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.Nullability;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java
index e1553fa29fac9..30c6abc5398e3 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java
@@ -22,12 +22,12 @@
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.Nullability;
 import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java
index 5d7bb97469db6..54d8c32d4d467 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Locate.java
@@ -15,12 +15,12 @@
 import org.elasticsearch.compute.ann.Evaluator;
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java
index 2404beb6ffb5a..3ff28e08f4ce1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Repeat.java
@@ -16,12 +16,12 @@
 import org.elasticsearch.compute.operator.BreakingBytesRefBuilder;
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java
index c243e8383b47f..7e03b3e821f20 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java
@@ -16,12 +16,12 @@
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.Example;
 import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
+import org.elasticsearch.xpack.esql.expression.function.OptionalArgument;
 import org.elasticsearch.xpack.esql.expression.function.Param;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
index 7b0b1b166af30..9769d286b484d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
@@ -25,8 +25,6 @@
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy;
-import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
@@ -42,6 +40,8 @@
 import org.elasticsearch.xpack.esql.expression.Order;
 import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern;
 import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
+import org.elasticsearch.xpack.esql.expression.function.FunctionResolutionStrategy;
+import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
index 266a89b9bbf81..fee51c40a2525 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
@@ -29,7 +29,6 @@
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar;
-import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.core.parser.ParserUtils;
 import org.elasticsearch.xpack.esql.core.plan.TableIdentifier;
 import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
@@ -40,6 +39,7 @@
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.util.Holder;
 import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern;
+import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.MetadataOptionContext;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Dissect;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java
index 6356b2644e67a..f137cf392f8ad 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan;
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -49,7 +48,7 @@ public List<Attribute> output() {
         return attributes;
     }
 
-    public List<List<Object>> values(FunctionRegistry functionRegistry) {
+    public List<List<Object>> values(EsqlFunctionRegistry functionRegistry) {
         List<List<Object>> rows = new ArrayList<>();
         for (var def : functionRegistry.listFunctions(null)) {
             EsqlFunctionRegistry.FunctionDescription signature = EsqlFunctionRegistry.description(def);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java
index 7cd2bf5729ca7..5ba2a205d52d0 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java
@@ -9,13 +9,13 @@
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.esql.core.plan.logical.BinaryPlan;
 import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
 import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
+import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Dissect;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
@@ -58,10 +58,10 @@
 
 public class Mapper {
 
-    private final FunctionRegistry functionRegistry;
+    private final EsqlFunctionRegistry functionRegistry;
     private final boolean localMode; // non-coordinator (data node) mode
 
-    public Mapper(FunctionRegistry functionRegistry) {
+    public Mapper(EsqlFunctionRegistry functionRegistry) {
         this.functionRegistry = functionRegistry;
         localMode = false;
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
index 44c08fc5fd60b..3119b328e8074 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
@@ -28,7 +28,6 @@
 import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.esql.core.index.IndexResolution;
 import org.elasticsearch.xpack.esql.core.index.MappingException;
 import org.elasticsearch.xpack.esql.core.plan.TableIdentifier;
@@ -38,6 +37,7 @@
 import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver;
 import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy;
 import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern;
+import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext;
 import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer;
@@ -77,7 +77,7 @@ public class EsqlSession {
     private final PreAnalyzer preAnalyzer;
     private final Verifier verifier;
-    private final FunctionRegistry functionRegistry;
+    private final EsqlFunctionRegistry functionRegistry;
     private final LogicalPlanOptimizer logicalPlanOptimizer;
     private final Mapper mapper;
 
@@ -89,7 +89,7 @@ public EsqlSession(
         IndexResolver indexResolver,
         EnrichPolicyResolver enrichPolicyResolver,
         PreAnalyzer preAnalyzer,
-        FunctionRegistry functionRegistry,
+        EsqlFunctionRegistry functionRegistry,
         LogicalPlanOptimizer logicalPlanOptimizer,
         Mapper mapper,
         Verifier verifier
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index b67840aae3bcb..b63a24556c31f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -55,7 +55,6 @@
 import org.elasticsearch.xpack.esql.core.CsvSpecReader;
 import org.elasticsearch.xpack.esql.core.SpecReader;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.esql.core.index.EsIndex;
 import org.elasticsearch.xpack.esql.core.index.IndexResolution;
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
@@ -161,7 +160,7 @@ public class CsvTests extends ESTestCase {
     private final EsqlConfiguration configuration = EsqlTestUtils.configuration(
         new QueryPragmas(Settings.builder().put("page_size", randomPageSize()).build())
     );
-    private final FunctionRegistry functionRegistry = new EsqlFunctionRegistry();
+    private final EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry();
     private final EsqlParser parser = new EsqlParser();
     private final Mapper mapper = new Mapper(functionRegistry);
     private final PhysicalPlanOptimizer physicalPlanOptimizer = new TestPhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration));
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java
index 27a42f79e39ff..8dfd8eee58c24 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java
@@ -13,13 +13,13 @@
 import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.esql.core.ParsingException;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition;
 import org.elasticsearch.xpack.esql.core.index.EsIndex;
 import org.elasticsearch.xpack.esql.core.index.IndexResolution;
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.type.TypesTests;
 import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
+import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition;
 import org.elasticsearch.xpack.esql.parser.EsqlParser;
 import org.elasticsearch.xpack.esql.plan.logical.Row;
 import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index d057dc6ff4320..dc650e3fcd965 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -40,7 +40,6 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.type.EsField;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java
index 6e2ec0d904b27..94549f6dfbdec 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistryTests.java
@@ -12,13 +12,9 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.esql.core.ParsingException;
 import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException;
+import org.elasticsearch.xpack.esql.core.TestUtils;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistryTests;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy;
-import org.elasticsearch.xpack.esql.core.expression.function.OptionalArgument;
-import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction;
+import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction;
 import org.elasticsearch.xpack.esql.core.session.Configuration;
 import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
 import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -26,22 +22,92 @@
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.function.Function;
 
+import static java.util.Collections.emptyList;
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomConfiguration;
-import static org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry.def;
-import static org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy.DEFAULT;
+import static org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry.def;
+import static org.elasticsearch.xpack.esql.expression.function.FunctionResolutionStrategy.DEFAULT;
 import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.is;
 import static org.mockito.Mockito.mock;
 
 public class EsqlFunctionRegistryTests extends ESTestCase {
 
+    public void testNoArgFunction() {
+        UnresolvedFunction ur = uf(DEFAULT);
+        EsqlFunctionRegistry r = new EsqlFunctionRegistry(def(DummyFunction.class, DummyFunction::new, "dummyFunction"));
+        FunctionDefinition def = r.resolveFunction(ur.name());
+        assertEquals(ur.source(), ur.buildResolved(TestUtils.randomConfiguration(), def).source());
+    }
+
+    public void testBinaryFunction() {
+        UnresolvedFunction ur = uf(DEFAULT, mock(Expression.class), mock(Expression.class));
+        EsqlFunctionRegistry r = new EsqlFunctionRegistry(def(DummyFunction.class, (Source l, Expression lhs, Expression rhs) -> {
+            assertSame(lhs, ur.children().get(0));
+            assertSame(rhs, ur.children().get(1));
+            return new DummyFunction(l);
+        }, "dummyFunction"));
+        FunctionDefinition def = r.resolveFunction(ur.name());
+        assertEquals(ur.source(), ur.buildResolved(TestUtils.randomConfiguration(), def).source());
+
+        // No children aren't supported
+        ParsingException e = expectThrows(ParsingException.class, () -> uf(DEFAULT).buildResolved(TestUtils.randomConfiguration(), def));
+        assertThat(e.getMessage(), endsWith("expects exactly two arguments"));
+
+        // One child isn't supported
+        e = expectThrows(
+            ParsingException.class,
+            () -> uf(DEFAULT, mock(Expression.class)).buildResolved(TestUtils.randomConfiguration(), def)
+        );
+        assertThat(e.getMessage(), endsWith("expects exactly two arguments"));
+
+        // Many children aren't supported
+        e = expectThrows(
+            ParsingException.class,
+            () -> uf(DEFAULT, mock(Expression.class), mock(Expression.class), mock(Expression.class)).buildResolved(
+                TestUtils.randomConfiguration(),
+                def
+            )
+        );
+        assertThat(e.getMessage(), endsWith("expects exactly two arguments"));
+    }
+
+    public void testAliasNameIsTheSameAsAFunctionName() {
+        EsqlFunctionRegistry r = new EsqlFunctionRegistry(def(DummyFunction.class, DummyFunction::new, "DUMMY_FUNCTION", "ALIAS"));
+        QlIllegalArgumentException iae = expectThrows(
+            QlIllegalArgumentException.class,
+            () -> r.register(def(DummyFunction2.class, DummyFunction2::new, "DUMMY_FUNCTION2", "DUMMY_FUNCTION"))
+        );
+        assertEquals("alias [DUMMY_FUNCTION] is used by [DUMMY_FUNCTION] and [DUMMY_FUNCTION2]", iae.getMessage());
+    }
+
+    public void testDuplicateAliasInTwoDifferentFunctionsFromTheSameBatch() {
+        QlIllegalArgumentException iae = expectThrows(
+            QlIllegalArgumentException.class,
+            () -> new EsqlFunctionRegistry(
+                def(DummyFunction.class, DummyFunction::new, "DUMMY_FUNCTION", "ALIAS"),
+                def(DummyFunction2.class, DummyFunction2::new, "DUMMY_FUNCTION2", "ALIAS")
+            )
+        );
+        assertEquals("alias [ALIAS] is used by [DUMMY_FUNCTION(ALIAS)] and [DUMMY_FUNCTION2]", iae.getMessage());
+    }
+
+    public void testDuplicateAliasInTwoDifferentFunctionsFromTwoDifferentBatches() {
+        EsqlFunctionRegistry r = new EsqlFunctionRegistry(def(DummyFunction.class, DummyFunction::new, "DUMMY_FUNCTION", "ALIAS"));
+        QlIllegalArgumentException iae = expectThrows(
+            QlIllegalArgumentException.class,
+            () -> r.register(def(DummyFunction2.class, DummyFunction2::new, "DUMMY_FUNCTION2", "ALIAS"))
+        );
+        assertEquals("alias [ALIAS] is used by [DUMMY_FUNCTION] and [DUMMY_FUNCTION2]", iae.getMessage());
+    }
+
     public void testFunctionResolving() {
         UnresolvedFunction ur = uf(DEFAULT, mock(Expression.class));
-        FunctionRegistry r = new EsqlFunctionRegistry(defineDummyFunction(ur, "dummyfunction", "dummyfunc"));
+        EsqlFunctionRegistry r = new EsqlFunctionRegistry(defineDummyFunction(ur, "dummyfunction", "dummyfunc"));
 
         // Resolve by primary name
         FunctionDefinition def;
@@ -72,7 +138,7 @@ public void testFunctionResolving() {
 
     public void testUnaryFunction() {
         UnresolvedFunction ur = uf(DEFAULT, mock(Expression.class));
-        FunctionRegistry r = new EsqlFunctionRegistry(defineDummyUnaryFunction(ur));
+        EsqlFunctionRegistry r = new EsqlFunctionRegistry(defineDummyUnaryFunction(ur));
         FunctionDefinition def = r.resolveFunction(ur.name());
 
         // No children aren't supported
@@ -90,8 +156,8 @@ public void testUnaryFunction() {
     public void testConfigurationOptionalFunction() {
         UnresolvedFunction ur = uf(DEFAULT, mock(Expression.class));
         FunctionDefinition def;
-        FunctionRegistry r = new EsqlFunctionRegistry(
-            EsqlFunctionRegistry.def(DummyConfigurationOptionalArgumentFunction.class, (Source l, Expression e, Configuration c) -> {
+        EsqlFunctionRegistry r = new EsqlFunctionRegistry(
+            def(DummyConfigurationOptionalArgumentFunction.class, (Source l, Expression e, Configuration c) -> {
                 assertSame(e, ur.children().get(0));
                 return new DummyConfigurationOptionalArgumentFunction(l, List.of(ur), c);
             }, "dummy")
@@ -105,9 +171,9 @@ private static UnresolvedFunction uf(FunctionResolutionStrategy resolutionStrate
     }
 
     private static FunctionDefinition defineDummyFunction(UnresolvedFunction ur, String... names) {
-        return def(FunctionRegistryTests.DummyFunction.class, (Source l, Expression e) -> {
+        return def(DummyFunction.class, (Source l, Expression e) -> {
             assertSame(e, ur.children().get(0));
-            return new FunctionRegistryTests.DummyFunction(l);
+            return new DummyFunction(l);
         }, names);
     }
 
@@ -127,6 +193,43 @@ private String randomCapitalizedString(String input) {
         return output.toString();
     }
 
+    public static class DummyFunction extends ScalarFunction {
+        public DummyFunction(Source source) {
+            super(source, emptyList());
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public String getWriteableName() {
+            throw new UnsupportedOperationException();
+        }
+
+        @Override
+        protected NodeInfo<DummyFunction> info() {
+            return NodeInfo.create(this);
+        }
+
+        @Override
+        public Expression replaceChildren(List<Expression> newChildren) {
+            throw new UnsupportedOperationException("this type of node doesn't have any children to replace");
+        }
+
+        @Override
+        public DataType dataType() {
+            return null;
+        }
+    }
+
+    public static class DummyFunction2 extends DummyFunction {
+        public DummyFunction2(Source source) {
+            super(source);
+        }
+    }
+
     public static class DummyConfigurationOptionalArgumentFunction extends EsqlConfigurationFunction implements OptionalArgument {
 
         public DummyConfigurationOptionalArgumentFunction(Source source, List<Expression> fields, Configuration configuration) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
index 6ef370fd2da35..4e00fa9f41fbd 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
@@ -20,7 +20,6 @@
 import net.nextencia.rrdiagram.grammar.rrdiagram.RRText;
 
 import org.elasticsearch.common.util.LazyInitializable;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionDefinition;
 
 import java.awt.Font;
 import java.awt.FontFormatException;
diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunctionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/UnresolvedFunctionTests.java
similarity index 99%
rename from x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunctionTests.java
rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/UnresolvedFunctionTests.java
index 9d29aaf63139f..7cb547876e532 100644
--- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/function/UnresolvedFunctionTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/UnresolvedFunctionTests.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.esql.core.expression.function;
+package org.elasticsearch.xpack.esql.expression.function;
 
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.tree.AbstractNodeTestCase;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
index 96f401ba894a5..a418670e98eac 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
@@ -42,7 +42,6 @@
 import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.expression.Order;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
@@ -206,7 +205,7 @@ public void init() {
         parser = new EsqlParser();
         logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG));
         physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config));
-        FunctionRegistry functionRegistry = new EsqlFunctionRegistry();
+        EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry();
         mapper = new Mapper(functionRegistry);
         var enrichResolution = setupEnrichResolution();
         // Most tests used data from the test index, so we load it here, and use it in the plan() function.
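The pattern throughout this refactoring commit is mechanical: callers that previously depended on the esql-core `FunctionRegistry` abstraction are rewired against the concrete ES|QL registry. A minimal sketch of the resulting wiring (illustrative only, not part of the patch; the class name `RegistryWiringSketch` is hypothetical, while the `EsqlFunctionRegistry()` and `Mapper(EsqlFunctionRegistry)` constructors are the ones shown in the hunks above):

```java
import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
import org.elasticsearch.xpack.esql.planner.Mapper;

// Hypothetical sketch: after this change, planner components are built
// against the concrete EsqlFunctionRegistry rather than the removed
// esql-core FunctionRegistry supertype.
class RegistryWiringSketch {
    static Mapper buildMapper() {
        EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry();
        return new Mapper(functionRegistry); // constructor changed in the Mapper hunk above
    }
}
```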
@@ -238,7 +237,7 @@ public void init() {
     TestDataSource makeTestDataSource(
         String indexName,
         String mappingFileName,
-        FunctionRegistry functionRegistry,
+        EsqlFunctionRegistry functionRegistry,
         EnrichResolution enrichResolution
     ) {
         Map<String, EsField> mapping = loadMapping(mappingFileName);
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java
index b24d9e6083b69..ac89298ffcfbb 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java
@@ -13,7 +13,6 @@
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar;
-import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
@@ -21,6 +20,7 @@
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern;
+import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul;
@@ -42,7 +42,6 @@
 import java.util.stream.IntStream;
 
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.as;
-import static org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy.DEFAULT;
 import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY;
 import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD;
 import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE;
@@ -50,6 +49,7 @@
 import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
 import static org.elasticsearch.xpack.esql.core.type.DataType.LONG;
 import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION;
+import static org.elasticsearch.xpack.esql.expression.function.FunctionResolutionStrategy.DEFAULT;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
index 8dcc87608c85b..2e2ca4feafa41 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.expression.Order;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
-import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison;
 import org.elasticsearch.xpack.esql.core.plan.TableIdentifier;
@@ -26,6 +25,7 @@
 import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
+import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add;
@@ -55,10 +55,10 @@
 import static org.elasticsearch.xpack.esql.EsqlTestUtils.as;
 import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE;
 import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE;
-import static org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy.DEFAULT;
 import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY;
 import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER;
 import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
+import static org.elasticsearch.xpack.esql.expression.function.FunctionResolutionStrategy.DEFAULT;
 import static org.elasticsearch.xpack.esql.parser.ExpressionBuilder.breakIntoFragments;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.contains;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java
index dde39b66664de..7454b25377594 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.analysis.Analyzer;
 import org.elasticsearch.xpack.esql.analysis.AnalyzerContext;
-import org.elasticsearch.xpack.esql.core.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.esql.core.index.EsIndex;
 import org.elasticsearch.xpack.esql.core.index.IndexResolution;
 import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
@@ -224,7 +223,7 @@ static LogicalPlan parse(String query) {
     static PhysicalPlan mapAndMaybeOptimize(LogicalPlan logicalPlan) {
         var physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(TEST_CFG));
-        FunctionRegistry functionRegistry = new EsqlFunctionRegistry();
+        EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry();
         var mapper = new Mapper(functionRegistry);
         var physical = mapper.map(logicalPlan);
         if (randomBoolean()) {
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java
index 9e2262e218236..50fe272caa076 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java
@@ -25,7 +25,6 @@
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttributeTests;
 import org.elasticsearch.xpack.esql.core.expression.UnresolvedNamedExpression;
 import org.elasticsearch.xpack.esql.core.expression.function.Function;
org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; @@ -39,6 +38,7 @@ import org.elasticsearch.xpack.esql.core.tree.SourceTests; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; From 9da69f666d65266273ea8e25805bcb0be6ad72f6 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 3 Jul 2024 17:03:21 +0300 Subject: [PATCH 161/216] Move yml rest test for query roles (#110423) This moves the query roles yml rest test away from the file that's also ran in serverless (where the query-roles endpoint is not available). --- .../rest-api-spec/test/roles/10_basic.yml | 27 ------------------- .../test/roles/60_bulk_roles.yml | 14 ++++++++++ 2 files changed, 14 insertions(+), 27 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml index 50c26394efbf2..db4ea4e8b205d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml @@ -87,20 +87,6 @@ teardown: - match: { admin_role.indices.0.names.0: "*" } - match: { admin_role.indices.0.privileges.0: "all" } - # query match_all roles - - do: - headers: - Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" - security.query_role: - body: > - { - "query": { "match_all": {} }, "sort": ["name"] - } - - match: { total: 2 } - - match: { count: 2 } - - match: { roles.0.name: "admin_role" } - - match: { roles.1.name: "backwards_role" } - - do: security.put_role: name: "role_with_description" @@ -118,16 +104,3 @@ teardown: name: "role_with_description" - match: { role_with_description.cluster.0: "manage_security" } - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." } - - # query again for this last role - - do: - headers: - Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" - security.query_role: - body: > - { - "query": { "match_all": {} }, "sort": ["name"], "from": 2 - } - - match: { total: 3 } - - match: { count: 1 } - - match: { roles.0.name: "role_with_description" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml index e608e9e14972d..c7a707f437e0c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/60_bulk_roles.yml @@ -74,6 +74,20 @@ teardown: - match: { role_with_description.cluster.0: "manage_security" } - match: { role_with_description.description: "Allows all security-related operations such as CRUD operations on users and roles and cache clearing." 
} + # query match_all roles + - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" + security.query_role: + body: > + { + "query": { "match_all": {} }, "sort": ["name"] + } + - match: { total: 2 } + - match: { count: 2 } + - match: { roles.0.name: "admin_role" } + - match: { roles.1.name: "role_with_description" } + - do: security.bulk_delete_role: body: > From a939502f2362810dc1265dc89c5dec68de6abcce Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 3 Jul 2024 17:15:03 +0300 Subject: [PATCH 162/216] Add a unittest for synthetic source on disabled keyword (#110428) * Add test for nested array, fix sort on nested test. * Fix sort on nested test. * Add a unittest for synthetic source on disabled keyword --- .../index/mapper/KeywordFieldMapperTests.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 67cd92477eedb..833b0a60827d0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -639,6 +639,19 @@ public void testKeywordFieldUtf8LongerThan32766SourceOnly() throws Exception { mapper.parse(source(b -> b.field("field", stringBuilder.toString()))); } + /** + * Test that we track the synthetic source if field is neither indexed nor has doc values nor stored + */ + public void testSyntheticSourceForDisabledField() throws Exception { + MapperService mapper = createMapperService( + syntheticSourceFieldMapping( + b -> b.field("type", "keyword").field("index", false).field("doc_values", false).field("store", false) + ) + ); + String value = randomAlphaOfLengthBetween(1, 20); + assertEquals("{\"field\":\"" + value + "\"}", syntheticSource(mapper.documentMapper(), b -> b.field("field", value))); + } + @Override protected boolean supportsIgnoreMalformed() { return false; From 17f1d6437034adf7925807a7f6229bbbe44ec6c5 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 3 Jul 2024 08:34:10 -0700 Subject: [PATCH 163/216] Improve reliability of RollupIndexerStateTests#testMultipleJobTriggering (#110397) --- muted-tests.yml | 3 --- .../xpack/rollup/job/RollupIndexerStateTests.java | 14 +++++--------- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index bf4640fff53c8..63610b9ceb355 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -44,9 +44,6 @@ tests: - class: "org.elasticsearch.xpack.inference.InferenceCrudIT" issue: "https://github.com/elastic/elasticsearch/issues/109391" method: "testDeleteEndpointWhileReferencedByPipeline" -- class: "org.elasticsearch.xpack.rollup.job.RollupIndexerStateTests" - issue: "https://github.com/elastic/elasticsearch/issues/109627" - method: "testMultipleJobTriggering" - class: "org.elasticsearch.xpack.test.rest.XPackRestIT" issue: "https://github.com/elastic/elasticsearch/issues/109687" method: "test {p0=sql/translate/Translate SQL}" diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index 24c034358be74..105711c4057a6 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -549,18 +549,14 @@ public void testMultipleJobTriggering() throws Exception { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); final ThreadPool threadPool = new TestThreadPool(getTestName()); try { - final AtomicBoolean isAborted = new AtomicBoolean(false); - DelayedEmptyRollupIndexer indexer = new DelayedEmptyRollupIndexer(threadPool, job, state, null) { - @Override - protected void onAbort() { - isAborted.set(true); - } - }; + DelayedEmptyRollupIndexer indexer = new DelayedEmptyRollupIndexer(threadPool, job, state, null); indexer.start(); for (int i = 0; i < 5; i++) { final CountDownLatch latch = indexer.newLatch(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); - assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + // This may take more than one attempt due to a cleanup/transition phase + // that happens after state change to STARTED (`isJobFinishing`). + assertBusy(() -> indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); assertFalse(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); @@ -570,7 +566,7 @@ protected void onAbort() { assertThat(indexer.getStats().getNumPages(), equalTo((long) i + 1)); } final CountDownLatch latch = indexer.newLatch(); - assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertBusy(() -> indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); assertThat(indexer.stop(), equalTo(IndexerState.STOPPING)); assertThat(indexer.getState(), Matchers.either(Matchers.is(IndexerState.STOPPING)).or(Matchers.is(IndexerState.STOPPED))); latch.countDown(); From 9087fc5de8ecbe96158cb7ce654c968be6b965eb Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Wed, 3 Jul 2024 11:59:16 -0400 Subject: [PATCH 164/216] [Inference API] Fix serialization for inference delete endpoint response (#110431) --- docs/changelog/110431.yaml | 5 +++++ .../action/DeleteInferenceEndpointAction.java | 10 ++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/110431.yaml diff --git a/docs/changelog/110431.yaml b/docs/changelog/110431.yaml new file mode 100644 index 0000000000000..0dd93ef718ef9 --- /dev/null +++ b/docs/changelog/110431.yaml @@ -0,0 +1,5 @@ +pr: 110431 +summary: "[Inference API] Fix serialization for inference delete endpoint response" +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java index 19542ef466156..dfb77ccd49fc2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java @@ -113,13 +113,19 @@ public Response(boolean acknowledged, Set pipelineIds) { public Response(StreamInput in) throws IOException { super(in); - pipelineIds = in.readCollectionAsSet(StreamInput::readString); + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { + pipelineIds = in.readCollectionAsSet(StreamInput::readString); + } else { 
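+                // the stream comes from an older node that never wrote this field, so fall back to an empty set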
+ pipelineIds = Set.of(); + } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeCollection(pipelineIds, StreamOutput::writeString); + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { + out.writeCollection(pipelineIds, StreamOutput::writeString); + } } @Override From 748dbd51e4cb493ed1779dfbe907ca750a43ccca Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 3 Jul 2024 09:52:21 -0700 Subject: [PATCH 165/216] [DOCS] Add serverless details in Elasticsearch security privileges (#109718) --- .../authorization/privileges.asciidoc | 115 ++++++++++++++---- 1 file changed, 89 insertions(+), 26 deletions(-) diff --git a/docs/reference/security/authorization/privileges.asciidoc b/docs/reference/security/authorization/privileges.asciidoc index be30db4d100bd..cc44c97a08129 100644 --- a/docs/reference/security/authorization/privileges.asciidoc +++ b/docs/reference/security/authorization/privileges.asciidoc @@ -1,6 +1,9 @@ -[role="xpack"] [[security-privileges]] === Security privileges +:frontmatter-description: A list of privileges that can be assigned to user roles. +:frontmatter-tags-products: [elasticsearch] +:frontmatter-tags-content-type: [reference] +:frontmatter-tags-user-goals: [secure] This section lists the privileges that you can assign to a role. @@ -19,16 +22,19 @@ See <> API for more informations. `create_snapshot`:: Privileges to create snapshots for existing repositories. Can also list and view details on existing repositories and snapshots. ++ +This privilege is not available in {serverless-full}. `cross_cluster_replication`:: Privileges to connect to <> for cross-cluster replication. + -- +This privilege is not available in {serverless-full}. + NOTE: This privilege should _not_ be directly granted. It is used internally by <> and <> to manage cross-cluster API keys. - -- `cross_cluster_search`:: @@ -36,14 +42,17 @@ Privileges to connect to <> and <> to manage cross-cluster API keys. - -- `grant_api_key`:: Privileges to create {es} API keys on behalf of other users. ++ +This privilege is not available in {serverless-full}. `manage`:: Builds on `monitor` and adds cluster operations that change values in the cluster. @@ -73,14 +82,37 @@ owned by other users. -- +`manage_autoscaling`:: +All operations related to managing autoscaling policies. ++ +This privilege is not available in {serverless-full}. + `manage_ccr`:: All {ccr} operations related to managing follower indices and auto-follow patterns. It also includes the authority to grant the privileges necessary to manage follower indices and auto-follow patterns. This privilege is necessary only on clusters that contain follower indices. ++ +This privilege is not available in {serverless-full}. + +`manage_data_frame_transforms`:: +All operations related to managing {transforms}. +deprecated[7.5] Use `manage_transform` instead. ++ +This privilege is not available in {serverless-full}. + +`manage_data_stream_global_retention`:: +All operations related to managing the data stream global retention settings. ++ +This privilege is not available in {serverless-full}. + +`manage_enrich`:: +All operations related to managing and executing enrich policies. `manage_ilm`:: -All {Ilm} operations related to managing policies. +All {ilm} operations related to managing policies. ++ +This privilege is not available in {serverless-full}. `manage_index_templates`:: All operations on index templates. 
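A hypothetical role definition may help anchor how these privilege names are consumed: they appear verbatim in role payloads sent to the create-role API. The role name, index pattern, and privilege selection below are invented for illustration, and the JSON lives in a Java text block so the sketch stays self-contained:

    // Sketch only: a request body that PUT /_security/role/ops_reader could accept,
    // combining privileges documented in this file. Names and patterns are invented.
    public class RoleExample {
        static final String OPS_READER_ROLE = """
            {
              "cluster": [ "monitor", "manage_ilm" ],
              "indices": [
                {
                  "names": [ "logs-*" ],
                  "privileges": [ "read", "view_index_metadata" ]
                }
              ]
            }
            """;
    }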
@@ -112,6 +144,8 @@ Enables the use of {es} APIs <>, and <>) to initiate and manage OpenID Connect authentication on behalf of other users. ++ +This privilege is not available in {serverless-full}. `manage_own_api_key`:: All security-related operations on {es} API keys that are owned by the current @@ -129,10 +163,14 @@ All operations on ingest pipelines. `manage_rollup`:: All rollup operations, including creating, starting, stopping and deleting rollup jobs. ++ +This privilege is not available in {serverless-full}. `manage_saml`:: Enables the use of internal {es} APIs to initiate and manage SAML authentication on behalf of other users. ++ +This privilege is not available in {serverless-full}. `manage_search_application`:: All CRUD operations on <>. @@ -152,46 +190,45 @@ All security-related operations on {es} service accounts including <>, <>, <>, and <>. ++ +This privilege is not available in {serverless-full}. `manage_slm`:: All {slm} ({slm-init}) actions, including creating and updating policies and starting and stopping {slm-init}. ++ +This privilege is not available in {serverless-full}. `manage_token`:: All security-related operations on tokens that are generated by the {es} Token Service. ++ +This privilege is not available in {serverless-full}. `manage_transform`:: All operations related to managing {transforms}. -`manage_autoscaling`:: -All operations related to managing autoscaling policies. - -`manage_data_frame_transforms`:: -All operations related to managing {transforms}. -deprecated[7.5] Use `manage_transform` instead. - -`manage_enrich`:: -All operations related to managing and executing enrich policies. - -`manage_data_stream_global_retention`:: -All operations related to managing the data stream global retention settings. - `manage_watcher`:: All watcher operations, such as putting watches, executing, activate or acknowledging. + -- +This privilege is not available in {serverless-full}. + NOTE: Watches that were created prior to version 6.1 or created when the {security-features} were disabled run as a system user with elevated privileges, including permission to read and write all indices. Newer watches run with the security roles of the user who created or updated them. - -- `monitor`:: All cluster read-only operations, like cluster health and state, hot threads, node info, node and cluster stats, and pending cluster tasks. +`monitor_data_stream_global_retention`:: +Allows the retrieval of the data stream global retention settings. ++ +This privilege is not available in {serverless-full}. + `monitor_enrich`:: All read-only operations related to managing and executing enrich policies. @@ -205,31 +242,40 @@ model snapshots, or results. `monitor_rollup`:: All read-only rollup operations, such as viewing the list of historical and currently running rollup jobs and their capabilities. ++ +This privilege is not available in {serverless-full}. `monitor_snapshot`:: Privileges to list and view details on existing repositories and snapshots. ++ +This privilege is not available in {serverless-full}. `monitor_text_structure`:: All read-only operations related to the <>. ++ +This privilege is not available in {serverless-full}. `monitor_transform`:: All read-only operations related to {transforms}. -`monitor_data_stream_global_retention`:: -Allows the retrieval of the data stream global retention settings. - `monitor_watcher`:: All read-only watcher operations, such as getting a watch and watcher stats. ++ +This privilege is not available in {serverless-full}. 
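As an aside, privileges like these gate APIs such as the role query exercised by the YAML test moved earlier in this series. A minimal sketch of issuing that same match_all query through the low-level REST client follows; the host, port, and authentication setup are assumptions for illustration, not taken from the patches:

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class QueryRolesExample {
        public static void main(String[] args) throws Exception {
            // Sketch only: assumes a local cluster and an already-authorized user.
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
                Request request = new Request("POST", "/_security/_query/role");
                request.setJsonEntity("""
                    { "query": { "match_all": {} }, "sort": ["name"] }
                    """);
                Response response = client.performRequest(request);
                System.out.println(response.getStatusLine());
            }
        }
    }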
`read_ccr`:: All read-only {ccr} operations, such as getting information about indices and metadata for leader indices in the cluster. It also includes the authority to check whether users have the appropriate privileges to follow leader indices. This privilege is necessary only on clusters that contain leader indices. ++ +This privilege is not available in {serverless-full}. `read_ilm`:: All read-only {Ilm} operations, such as getting policies and checking the status of {Ilm} ++ +This privilege is not available in {serverless-full}. `read_pipeline`:: Read-only access to ingest pipline (get, simulate). @@ -237,6 +283,8 @@ Read-only access to ingest pipline (get, simulate). `read_slm`:: All read-only {slm-init} actions, such as getting policies and checking the {slm-init} status. ++ +This privilege is not available in {serverless-full}. `read_security`:: All read-only security-related operations, such as getting users, user profiles, @@ -247,6 +295,8 @@ on all {es} API keys. `transport_client`:: All privileges necessary for a transport client to connect. Required by the remote cluster to enable <>. ++ +This privilege is not available in {serverless-full}. [[privileges-list-indices]] ==== Indices privileges @@ -320,16 +370,19 @@ Privileges to perform cross-cluster replication for indices located on <>. This privilege should only be used for the `privileges` field of <>. ++ +This privilege is not available in {serverless-full}. `cross_cluster_replication_internal`:: Privileges to perform supporting actions for cross-cluster replication from <>. + -- +This privilege is not available in {serverless-full}. + NOTE: This privilege should _not_ be directly granted. It is used internally by <> and <> to manage cross-cluster API keys. - -- `delete`:: @@ -356,24 +409,30 @@ All `monitor` privileges plus index and data stream administration (aliases, analyze, cache clear, close, delete, exists, flush, mapping, open, field capabilities, force merge, refresh, settings, search shards, validate query). +`manage_data_stream_lifecycle`:: +All <> operations relating to reading and managing the built-in lifecycle of a data stream. +This includes operations such as adding and removing a lifecycle from a data stream. + `manage_follow_index`:: All actions that are required to manage the lifecycle of a follower index, which includes creating a follower index, closing it, and converting it to a regular index. This privilege is necessary only on clusters that contain follower indices. ++ +This privilege is not available in {serverless-full}. `manage_ilm`:: All {Ilm} operations relating to managing the execution of policies of an index or data stream. This includes operations such as retrying policies and removing a policy from an index or data stream. - -`manage_data_stream_lifecycle`:: -All <> operations relating to reading and managing the built-in lifecycle of a data stream. -This includes operations such as adding and removing a lifecycle from a data stream. ++ +This privilege is not available in {serverless-full}. `manage_leader_index`:: All actions that are required to manage the lifecycle of a leader index, which includes <>. This privilege is necessary only on clusters that contain leader indices. ++ +This privilege is not available in {serverless-full}. `monitor`:: All actions that are required for monitoring (recovery, segments info, index @@ -386,6 +445,8 @@ clear_scroll, search, suggest, tv). `read_cross_cluster`:: Read-only access to the search action from a <>. 
++ +This privilege is not available in {serverless-full}. `view_index_metadata`:: Read-only access to index and data stream metadata (aliases, exists, @@ -411,6 +472,8 @@ of user names. (You can also specify users as an array of strings or a YAML sequence.) For more information, see <>. +This privilege is not available in {serverless-full}. + [[application-privileges]] ==== Application privileges From f30a6d9f8cc9e08117c08a766c7721f605e4fee8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 4 Jul 2024 03:04:38 +1000 Subject: [PATCH 166/216] Mute org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT test {p0=search.vectors/41_knn_search_half_byte_quantized/Test create, merge, and search cosine} #109978 --- muted-tests.yml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 63610b9ceb355..d8eba8ad2dba6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -4,8 +4,7 @@ tests: method: "testGuessIsDayFirstFromLocale" - class: "org.elasticsearch.test.rest.ClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/108857" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ - \ dependent mappings / dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}" - class: "org.elasticsearch.upgrades.SearchStatesIT" issue: "https://github.com/elastic/elasticsearch/issues/108991" method: "testCanMatch" @@ -14,8 +13,7 @@ tests: method: "testTrainedModelInference" - class: "org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109188" - method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale\ - \ dependent mappings / dates}" + method: "test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}" - class: "org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT" issue: "https://github.com/elastic/elasticsearch/issues/109189" method: "test {p0=esql/70_locale/Date format with Italian locale}" @@ -30,8 +28,7 @@ tests: method: "testTimestampFieldTypeExposedByAllIndicesServices" - class: "org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT" issue: "https://github.com/elastic/elasticsearch/issues/109318" - method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling\ - \ (too complex pattern)}" + method: "test {yaml=analysis-common/50_char_filters/pattern_replace error handling (too complex pattern)}" - class: "org.elasticsearch.xpack.ml.integration.ClassificationHousePricingIT" issue: "https://github.com/elastic/elasticsearch/issues/101598" method: "testFeatureImportanceValues" @@ -80,8 +77,7 @@ tests: method: testLoadAll issue: https://github.com/elastic/elasticsearch/issues/110244 - class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT - method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not - supported} + method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not supported} issue: https://github.com/elastic/elasticsearch/issues/110290 - class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT method: test {yaml=painless/146_dense_vector_bit_basic/Dot Product is not supported} @@ -119,6 +115,9 @@ tests: - class: "org.elasticsearch.xpack.security.role.RoleWithDescriptionRestIT" issue: 
"https://github.com/elastic/elasticsearch/issues/110417" method: "testCreateOrUpdateRoleWithDescription" +- class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT + method: test {p0=search.vectors/41_knn_search_half_byte_quantized/Test create, merge, and search cosine} + issue: https://github.com/elastic/elasticsearch/issues/109978 # Examples: # From 1dc7eafe2f43d19630daf4f0a39fd15f7e07cc38 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 3 Jul 2024 13:13:22 -0400 Subject: [PATCH 167/216] ESQL: Merge more code into esql-proper (#110432) This removes the `Graphviz` class which we don't currently use and merges the `LoggingUtils` class into it's single caller, `EsqlResponseListener`. --- .../xpack/esql/core/util/Graphviz.java | 313 ------------------ .../xpack/esql/core/util/LoggingUtils.java | 24 -- .../esql/action/EsqlResponseListener.java | 9 +- 3 files changed, 7 insertions(+), 339 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Graphviz.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/LoggingUtils.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Graphviz.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Graphviz.java deleted file mode 100644 index 5502f04549ce3..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/Graphviz.java +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.esql.core.util; - -import org.elasticsearch.xpack.esql.core.tree.Node; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.atomic.AtomicInteger; - -// use the awesome http://mdaines.github.io/viz.js/ to visualize and play around with the various options -public abstract class Graphviz { - - private static final int NODE_LABEL_INDENT = 12; - private static final int CLUSTER_INDENT = 2; - private static final int INDENT = 1; - - public static String dot(String name, Node root) { - StringBuilder sb = new StringBuilder(); - // name - sb.append(String.format(Locale.ROOT, """ - digraph G { rankdir=BT; - label="%s"; - node[shape=plaintext, color=azure1]; - edge[color=black,arrowsize=0.5]; - """, name)); - handleNode(sb, root, new AtomicInteger(0), INDENT, true); - sb.append("}"); - return sb.toString(); - } - - public static String dot(Map> clusters, boolean drawSubTrees) { - AtomicInteger nodeCounter = new AtomicInteger(0); - - StringBuilder sb = new StringBuilder(); - // name - sb.append(""" - digraph G { rankdir=BT; - node[shape=plaintext, color=azure1]; - edge[color=black]; - graph[compound=true]; - - """); - - int clusterNodeStart = 1; - int clusterId = 0; - - StringBuilder clusterEdges = new StringBuilder(); - - for (Entry> entry : clusters.entrySet()) { - indent(sb, INDENT); - // draw cluster - sb.append("subgraph cluster"); - sb.append(++clusterId); - sb.append(" {\n"); - indent(sb, CLUSTER_INDENT); - sb.append("color=blue;\n"); - indent(sb, CLUSTER_INDENT); - sb.append("label="); - sb.append(quoteGraphviz(entry.getKey())); - sb.append(";\n\n"); - - /* to help align the clusters, add an invisible node (that could - * otherwise be used for labeling but it consumes too much space) - * used for alignment */ - indent(sb, CLUSTER_INDENT); - sb.append("c" + clusterId); - sb.append("[style=invis]\n"); - // add edge to the first node in the cluster - indent(sb, CLUSTER_INDENT); - sb.append("node" + (nodeCounter.get() + 1)); - sb.append(" -> "); - sb.append("c" + clusterId); - sb.append(" [style=invis];\n"); - - handleNode(sb, entry.getValue(), nodeCounter, CLUSTER_INDENT, drawSubTrees); - - int clusterNodeStop = nodeCounter.get(); - - indent(sb, INDENT); - sb.append("}\n"); - - // connect cluster only if there are at least two - if (clusterId > 1) { - indent(clusterEdges, INDENT); - clusterEdges.append("node" + clusterNodeStart); - clusterEdges.append(" -> "); - clusterEdges.append("node" + clusterNodeStop); - clusterEdges.append("[ltail=cluster"); - clusterEdges.append(clusterId - 1); - clusterEdges.append(" lhead=cluster"); - clusterEdges.append(clusterId); - clusterEdges.append("];\n"); - } - clusterNodeStart = clusterNodeStop; - } - - sb.append("\n"); - - // connecting the clusters arranges them in a weird position - // so don't - // sb.append(clusterEdges.toString()); - - // align the cluster by requiring the invisible nodes in each cluster to be of the same rank - indent(sb, INDENT); - sb.append("{ rank=same"); - for (int i = 1; i <= clusterId; i++) { - sb.append(" c" + i); - } - sb.append(" };\n}"); - - return sb.toString(); - } - - private static void handleNode(StringBuilder output, Node n, AtomicInteger nodeId, int currentIndent, boolean drawSubTrees) { - // each node has its own id - int thisId = nodeId.incrementAndGet(); - - // first determine node info - StringBuilder nodeInfo = new StringBuilder(); - 
nodeInfo.append("\n"); - indent(nodeInfo, currentIndent + NODE_LABEL_INDENT); - nodeInfo.append(""" - - """); - indent(nodeInfo, currentIndent + NODE_LABEL_INDENT); - nodeInfo.append(String.format(Locale.ROOT, """ - - """, n.nodeName())); - indent(nodeInfo, currentIndent + NODE_LABEL_INDENT); - - List props = n.nodeProperties(); - List parsed = new ArrayList<>(props.size()); - List> subTrees = new ArrayList<>(); - - for (Object v : props) { - // skip null values, children and location - if (v != null && n.children().contains(v) == false) { - if (v instanceof Collection c) { - StringBuilder colS = new StringBuilder(); - for (Object o : c) { - if (drawSubTrees && isAnotherTree(o)) { - subTrees.add((Node) o); - } else { - colS.append(o); - colS.append("\n"); - } - } - if (colS.length() > 0) { - parsed.add(colS.toString()); - } - } else { - if (drawSubTrees && isAnotherTree(v)) { - subTrees.add((Node) v); - } else { - parsed.add(v.toString()); - } - } - } - } - - for (String line : parsed) { - nodeInfo.append("\n"); - indent(nodeInfo, currentIndent + NODE_LABEL_INDENT); - } - - nodeInfo.append("
    %s
    "); - nodeInfo.append(escapeHtml(line)); - nodeInfo.append("
    \n"); - - // check any subtrees - if (subTrees.isEmpty() == false) { - // write nested trees - output.append(String.format(Locale.ROOT, """ - subgraph cluster_%s{ - style=filled; color=white; fillcolor=azure2; label=""; - """, thisId)); - } - - // write node info - indent(output, currentIndent); - output.append("node"); - output.append(thisId); - output.append("[label="); - output.append(quoteGraphviz(nodeInfo.toString())); - output.append("];\n"); - - if (subTrees.isEmpty() == false) { - indent(output, currentIndent + INDENT); - output.append("node[shape=ellipse, color=black]\n"); - - for (Node node : subTrees) { - indent(output, currentIndent + INDENT); - drawNodeTree(output, node, "st_" + thisId + "_", 0); - } - - output.append("\n}\n"); - } - - indent(output, currentIndent + 1); - // output.append("{ rankdir=LR; rank=same; \n"); - int prevId = -1; - // handle children - for (Node c : n.children()) { - // the child will always have the next id - int childId = nodeId.get() + 1; - handleNode(output, c, nodeId, currentIndent + INDENT, drawSubTrees); - indent(output, currentIndent + 1); - output.append("node"); - output.append(childId); - output.append(" -> "); - output.append("node"); - output.append(thisId); - output.append(";\n"); - - // add invisible connection between children for ordering - if (prevId != -1) { - indent(output, currentIndent + 1); - output.append("node"); - output.append(prevId); - output.append(" -> "); - output.append("node"); - output.append(childId); - output.append(";\n"); - } - prevId = childId; - } - indent(output, currentIndent); - // output.append("}\n"); - } - - private static void drawNodeTree(StringBuilder sb, Node node, String prefix, int counter) { - String nodeName = prefix + counter; - prefix = nodeName; - - // draw node - drawNode(sb, node, nodeName); - // then draw all children nodes and connections between them to be on the same level - sb.append("{ rankdir=LR; rank=same;\n"); - int prevId = -1; - int saveId = counter; - for (Node child : node.children()) { - int currId = ++counter; - drawNode(sb, child, prefix + currId); - if (prevId > -1) { - sb.append(prefix + prevId + " -> " + prefix + currId + " [style=invis];\n"); - } - prevId = currId; - } - sb.append("}\n"); - - // now draw connections to the parent - for (int i = saveId; i < counter; i++) { - sb.append(prefix + (i + 1) + " -> " + nodeName + ";\n"); - } - - // draw the child - counter = saveId; - for (Node child : node.children()) { - drawNodeTree(sb, child, prefix, ++counter); - } - } - - private static void drawNode(StringBuilder sb, Node node, String nodeName) { - if (node.children().isEmpty()) { - sb.append(nodeName + " [label=\"" + node.toString() + "\"];\n"); - } else { - sb.append(nodeName + " [label=\"" + node.nodeName() + "\"];\n"); - } - } - - private static boolean isAnotherTree(Object value) { - if (value instanceof Node n) { - // create a subgraph - if (n.children().size() > 0) { - return true; - } - } - return false; - } - - private static String escapeHtml(Object value) { - return String.valueOf(value) - .replace("&", "&") - .replace("\"", """) - .replace("'", "'") - .replace("<", "<") - .replace(">", ">") - .replace("\n", "
    "); - } - - private static String quoteGraphviz(String value) { - if (value.contains("<")) { - return "<" + value + ">"; - } - - return "\"" + value + "\""; - } - - private static void indent(StringBuilder sb, int indent) { - for (int i = 0; i < indent; i++) { - sb.append(" "); - } - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/LoggingUtils.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/LoggingUtils.java deleted file mode 100644 index 09b80b25ca5f8..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/util/LoggingUtils.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.util; - -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.logging.Level; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.rest.RestStatus; - -public final class LoggingUtils { - - private LoggingUtils() {} - - public static void logOnFailure(Logger logger, Throwable throwable) { - RestStatus status = ExceptionsHelper.status(throwable); - logger.log(status.getStatus() >= 500 ? Level.WARN : Level.DEBUG, () -> "Request failed with status [" + status + "]: ", throwable); - } - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java index 3e3f65daeeec5..5ce1ca25c5913 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java @@ -7,10 +7,12 @@ package org.elasticsearch.xpack.esql.action; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.logging.Level; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.ChunkedRestResponseBodyPart; @@ -29,7 +31,6 @@ import java.util.Locale; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.esql.core.util.LoggingUtils.logOnFailure; import static org.elasticsearch.xpack.esql.formatter.TextFormat.CSV; import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; @@ -168,7 +169,7 @@ private RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws IOExce */ public ActionListener wrapWithLogging() { ActionListener listener = ActionListener.wrap(this::onResponse, ex -> { - logOnFailure(LOGGER, ex); + logOnFailure(ex); onFailure(ex); }); if (LOGGER.isDebugEnabled() == false) { @@ -190,4 +191,8 @@ public ActionListener wrapWithLogging() { }); } + static void logOnFailure(Throwable throwable) { + RestStatus status = ExceptionsHelper.status(throwable); + LOGGER.log(status.getStatus() >= 500 ? 
Level.WARN : Level.DEBUG, () -> "Request failed with status [" + status + "]: ", throwable); + } } From 1dfb721b2295f56c31369afdf7d54c86345354d0 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 3 Jul 2024 14:02:47 -0400 Subject: [PATCH 168/216] ESQL: Rename `isInteger` to `isWholeNumber` (#110425) It's confusing that we have a type called `integer` and we have a method called `isInteger` which returns `true` for `integer` *and* `long`. This renames that method to `isWholeNumber`. It also renames `isRational` to `isRationalNumber` to line up. --- .../esql/core/expression/TypeResolutions.java | 4 +- .../xpack/esql/core/type/DataType.java | 61 +++++++++---------- .../esql/core/type/DataTypeConverter.java | 40 ++++++------ .../function/aggregate/CountDistinct.java | 4 +- .../expression/function/aggregate/Rate.java | 2 +- .../expression/function/aggregate/Sum.java | 2 +- .../expression/function/grouping/Bucket.java | 10 +-- .../expression/function/scalar/math/Ceil.java | 2 +- .../function/scalar/math/Floor.java | 2 +- .../function/scalar/math/Round.java | 4 +- .../rules/SimplifyComparisonsArithmetics.java | 10 +-- .../planner/EsqlExpressionTranslators.java | 2 +- 12 files changed, 71 insertions(+), 72 deletions(-) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java index 7302d08f81925..c3593e91c537e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java @@ -50,8 +50,8 @@ public static TypeResolution isBoolean(Expression e, String operationName, Param return isType(e, dt -> dt == BOOLEAN, operationName, paramOrd, "boolean"); } - public static TypeResolution isInteger(Expression e, String operationName, ParamOrdinal paramOrd) { - return isType(e, DataType::isInteger, operationName, paramOrd, "integer"); + public static TypeResolution isWholeNumber(Expression e, String operationName, ParamOrdinal paramOrd) { + return isType(e, DataType::isWholeNumber, operationName, paramOrd, "integer"); } public static TypeResolution isNumeric(Expression e, String operationName, ParamOrdinal paramOrd) { diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 2dc141dd1bac0..503c076b4f7a2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -42,15 +42,15 @@ public enum DataType { COUNTER_INTEGER(builder().esType("counter_integer").size(Integer.BYTES).docValues().counter()), COUNTER_DOUBLE(builder().esType("counter_double").size(Double.BYTES).docValues().counter()), - LONG(builder().esType("long").size(Long.BYTES).integer().docValues().counter(COUNTER_LONG)), - INTEGER(builder().esType("integer").size(Integer.BYTES).integer().docValues().counter(COUNTER_INTEGER)), - SHORT(builder().esType("short").size(Short.BYTES).integer().docValues().widenSmallNumeric(INTEGER)), - BYTE(builder().esType("byte").size(Byte.BYTES).integer().docValues().widenSmallNumeric(INTEGER)), - UNSIGNED_LONG(builder().esType("unsigned_long").size(Long.BYTES).integer().docValues()), - 
DOUBLE(builder().esType("double").size(Double.BYTES).rational().docValues().counter(COUNTER_DOUBLE)), - FLOAT(builder().esType("float").size(Float.BYTES).rational().docValues().widenSmallNumeric(DOUBLE)), - HALF_FLOAT(builder().esType("half_float").size(Float.BYTES).rational().docValues().widenSmallNumeric(DOUBLE)), - SCALED_FLOAT(builder().esType("scaled_float").size(Long.BYTES).rational().docValues().widenSmallNumeric(DOUBLE)), + LONG(builder().esType("long").size(Long.BYTES).wholeNumber().docValues().counter(COUNTER_LONG)), + INTEGER(builder().esType("integer").size(Integer.BYTES).wholeNumber().docValues().counter(COUNTER_INTEGER)), + SHORT(builder().esType("short").size(Short.BYTES).wholeNumber().docValues().widenSmallNumeric(INTEGER)), + BYTE(builder().esType("byte").size(Byte.BYTES).wholeNumber().docValues().widenSmallNumeric(INTEGER)), + UNSIGNED_LONG(builder().esType("unsigned_long").size(Long.BYTES).wholeNumber().docValues()), + DOUBLE(builder().esType("double").size(Double.BYTES).rationalNumber().docValues().counter(COUNTER_DOUBLE)), + FLOAT(builder().esType("float").size(Float.BYTES).rationalNumber().docValues().widenSmallNumeric(DOUBLE)), + HALF_FLOAT(builder().esType("half_float").size(Float.BYTES).rationalNumber().docValues().widenSmallNumeric(DOUBLE)), + SCALED_FLOAT(builder().esType("scaled_float").size(Long.BYTES).rationalNumber().docValues().widenSmallNumeric(DOUBLE)), KEYWORD(builder().esType("keyword").unknownSize().docValues()), TEXT(builder().esType("text").unknownSize()), @@ -80,14 +80,14 @@ public enum DataType { private final int size; /** - * True if the type represents an integer number + * True if the type represents a "whole number", as in, does not have a decimal part. */ - private final boolean isInteger; + private final boolean isWholeNumber; /** - * True if the type represents a rational number + * True if the type represents a "rational number", as in, does have a decimal part. */ - private final boolean isRational; + private final boolean isRationalNumber; /** * True if the type supports doc values by default @@ -117,8 +117,8 @@ public enum DataType { this.name = typeString.toUpperCase(Locale.ROOT); this.esType = builder.esType; this.size = builder.size; - this.isInteger = builder.isInteger; - this.isRational = builder.isRational; + this.isWholeNumber = builder.isWholeNumber; + this.isRationalNumber = builder.isRationalNumber; this.docValues = builder.docValues; this.isCounter = builder.isCounter; this.widenSmallNumeric = builder.widenSmallNumeric; @@ -262,25 +262,24 @@ public String outputType() { } /** - * Does this data type represent whole numbers? As in, numbers without a decimal point. - * Like {@code int} or {@code long}. See {@link #isRational} for numbers with a decimal point. + * True if the type represents a "whole number", as in, does not have a decimal part. */ - public boolean isInteger() { - return isInteger; + public boolean isWholeNumber() { + return isWholeNumber; } /** - * Does this data type represent rational numbers (like floating point)? + * True if the type represents a "rational number", as in, does have a decimal part. */ - public boolean isRational() { - return isRational; + public boolean isRationalNumber() { + return isRationalNumber; } /** * Does this data type represent any number? 
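+     * That is, either a whole number or a rational number.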
*/ public boolean isNumeric() { - return isInteger || isRational; + return isWholeNumber || isRationalNumber; } public int size() { @@ -356,14 +355,14 @@ private static class Builder { private int size; /** - * True if the type represents an integer number + * True if the type represents a "whole number", as in, does not have a decimal part. */ - private boolean isInteger; + private boolean isWholeNumber; /** - * True if the type represents a rational number + * True if the type represents a "rational number", as in, does have a decimal part. */ - private boolean isRational; + private boolean isRationalNumber; /** * True if the type supports doc values by default @@ -409,13 +408,13 @@ Builder unknownSize() { return this; } - Builder integer() { - this.isInteger = true; + Builder wholeNumber() { + this.isWholeNumber = true; return this; } - Builder rational() { - this.isRational = true; + Builder rationalNumber() { + this.isRationalNumber = true; return this; } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java index bb53472d06e71..bd87a92f3289d 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataTypeConverter.java @@ -78,9 +78,9 @@ public static DataType commonType(DataType left, DataType right) { } if (left.isNumeric() && right.isNumeric()) { // if one is int - if (left.isInteger()) { + if (left.isWholeNumber()) { // promote the highest int - if (right.isInteger()) { + if (right.isWholeNumber()) { if (left == UNSIGNED_LONG || right == UNSIGNED_LONG) { return UNSIGNED_LONG; } @@ -90,7 +90,7 @@ public static DataType commonType(DataType left, DataType right) { return right; } // try the other side - if (right.isInteger()) { + if (right.isWholeNumber()) { return left; } // promote the highest rational @@ -200,10 +200,10 @@ private static Converter conversionToVersion(DataType from) { } private static Converter conversionToUnsignedLong(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_UNSIGNED_LONG; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_UNSIGNED_LONG; } if (from == BOOLEAN) { @@ -219,10 +219,10 @@ private static Converter conversionToUnsignedLong(DataType from) { } private static Converter conversionToLong(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_LONG; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_LONG; } if (from == BOOLEAN) { @@ -238,10 +238,10 @@ private static Converter conversionToLong(DataType from) { } private static Converter conversionToInt(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_INT; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_INT; } if (from == BOOLEAN) { @@ -257,10 +257,10 @@ private static Converter conversionToInt(DataType from) { } private static Converter conversionToShort(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_SHORT; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_SHORT; } if (from == BOOLEAN) { @@ 
-276,10 +276,10 @@ private static Converter conversionToShort(DataType from) { } private static Converter conversionToByte(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_BYTE; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_BYTE; } if (from == BOOLEAN) { @@ -295,10 +295,10 @@ private static Converter conversionToByte(DataType from) { } private static DefaultConverter conversionToFloat(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_FLOAT; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_FLOAT; } if (from == BOOLEAN) { @@ -314,10 +314,10 @@ private static DefaultConverter conversionToFloat(DataType from) { } private static DefaultConverter conversionToDouble(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_DOUBLE; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_DOUBLE; } if (from == BOOLEAN) { @@ -333,10 +333,10 @@ private static DefaultConverter conversionToDouble(DataType from) { } private static DefaultConverter conversionToDateTime(DataType from) { - if (from.isRational()) { + if (from.isRationalNumber()) { return DefaultConverter.RATIONAL_TO_DATETIME; } - if (from.isInteger()) { + if (from.isWholeNumber()) { return DefaultConverter.INTEGER_TO_DATETIME; } if (from == BOOLEAN) { @@ -628,6 +628,6 @@ public static DataType asInteger(DataType dataType) { return dataType; } - return dataType.isInteger() ? dataType : LONG; + return dataType.isWholeNumber() ? dataType : LONG; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index f52c162ae5d7b..5e61f69758a47 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -40,8 +40,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isFoldable; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isWholeNumber; public class CountDistinct extends AggregateFunction implements OptionalArgument, ToAggregator, SurrogateExpression { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -124,7 +124,7 @@ protected TypeResolution resolveType() { if (resolution.unresolved() || precision == null) { return resolution; } - return isInteger(precision, sourceText(), SECOND).and(isFoldable(precision, sourceText(), SECOND)); + return isWholeNumber(precision, sourceText(), SECOND).and(isFoldable(precision, sourceText(), SECOND)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index 620a3759d9b19..682590bb7e857 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -125,7 +125,7 @@ protected TypeResolution resolveType() { ); if (unit != null) { resolution = resolution.and( - isType(unit, dt -> dt.isInteger() || EsqlDataTypes.isTemporalAmount(dt), sourceText(), SECOND, "time_duration") + isType(unit, dt -> dt.isWholeNumber() || EsqlDataTypes.isTemporalAmount(dt), sourceText(), SECOND, "time_duration") ); } return resolution; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index 34669454a2fa4..e15cf774c3c3f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -64,7 +64,7 @@ public Sum replaceChildren(List newChildren) { @Override public DataType dataType() { DataType dt = field().dataType(); - return dt.isInteger() == false || dt == UNSIGNED_LONG ? DOUBLE : LONG; + return dt.isWholeNumber() == false || dt == UNSIGNED_LONG ? DOUBLE : LONG; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java index b8b084066af34..40e927404befd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Bucket.java @@ -233,7 +233,7 @@ public boolean foldable() { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { if (field.dataType() == DataType.DATETIME) { Rounding.Prepared preparedRounding; - if (buckets.dataType().isInteger()) { + if (buckets.dataType().isWholeNumber()) { int b = ((Number) buckets.fold()).intValue(); long f = foldToLong(from); long t = foldToLong(to); @@ -252,7 +252,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function dt.isInteger() || EsqlDataTypes.isTemporalAmount(dt), + dt -> dt.isWholeNumber() || EsqlDataTypes.isTemporalAmount(dt), sourceText(), SECOND, "integral", "date_period", "time_duration" ); - return bucketsType.isInteger() + return bucketsType.isWholeNumber() ? resolution.and(checkArgsCount(4)) .and(() -> isStringOrDate(from, sourceText(), THIRD)) .and(() -> isStringOrDate(to, sourceText(), FOURTH)) : resolution.and(checkArgsCount(2)); // temporal amount } if (fieldType.isNumeric()) { - return bucketsType.isInteger() + return bucketsType.isWholeNumber() ? 
checkArgsCount(4).and(() -> isNumeric(from, sourceText(), THIRD)).and(() -> isNumeric(to, sourceText(), FOURTH)) : isNumeric(buckets, sourceText(), SECOND).and(checkArgsCount(2)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 7d31cec0e54a2..909de387c62ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -65,7 +65,7 @@ public String getWriteableName() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - if (dataType().isInteger()) { + if (dataType().isWholeNumber()) { return toEvaluator.apply(field()); } var fieldEval = toEvaluator.apply(field()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index 73ff0aec2b126..638770f2f079a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -67,7 +67,7 @@ public String getWriteableName() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - if (dataType().isInteger()) { + if (dataType().isWholeNumber()) { return toEvaluator.apply(field()); } return new FloorDoubleEvaluator.Factory(source(), toEvaluator.apply(field())); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 7223615294446..8fcb04d021e7a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -35,8 +35,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isWholeNumber; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; @@ -104,7 +104,7 @@ protected TypeResolution resolveType() { return resolution; } - return decimals == null ? TypeResolution.TYPE_RESOLVED : isInteger(decimals, sourceText(), SECOND); + return decimals == null ? 
TypeResolution.TYPE_RESOLVED : isWholeNumber(decimals, sourceText(), SECOND); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java index 0d3aaaa3a9d47..151d11fa575ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java @@ -132,7 +132,7 @@ final boolean isUnsafe(BiFunction typesCompatible) // x + 1e18 > 1e18::long will yield different results with a field value in [-2^6, 2^6], optimised vs original; // x * (1 + 1e-15d) > 1 : same with a field value of (1 - 1e-15d) // so consequently, int fields optimisation requiring FP arithmetic isn't possible either: (x - 1e-15) * (1 + 1e-15) > 1. - if (opLiteral.dataType().isRational() || bcLiteral.dataType().isRational()) { + if (opLiteral.dataType().isRationalNumber() || bcLiteral.dataType().isRationalNumber()) { return true; } @@ -146,7 +146,7 @@ final boolean isUnsafe(BiFunction typesCompatible) final Expression apply() { // force float point folding for FlP field - Literal bcl = operation.dataType().isRational() + Literal bcl = operation.dataType().isRationalNumber() ? new Literal(bcLiteral.source(), ((Number) bcLiteral.value()).doubleValue(), DataType.DOUBLE) : bcLiteral; @@ -177,7 +177,7 @@ private static class AddSubSimplifier extends OperationSimplifier { @Override boolean isOpUnsafe() { // no ADD/SUB with floating fields - if (operation.dataType().isRational()) { + if (operation.dataType().isRationalNumber()) { return true; } @@ -204,12 +204,12 @@ private static class MulDivSimplifier extends OperationSimplifier { @Override boolean isOpUnsafe() { // Integer divisions are not safe to optimise: x / 5 > 1 <=/=> x > 5 for x in [6, 9]; same for the `==` comp - if (operation.dataType().isInteger() && isDiv) { + if (operation.dataType().isWholeNumber() && isDiv) { return true; } // If current operation is a multiplication, it's inverse will be a division: safe only if outcome is still integral. - if (isDiv == false && opLeft.dataType().isInteger()) { + if (isDiv == false && opLeft.dataType().isWholeNumber()) { long opLiteralValue = ((Number) opLiteral.value()).longValue(); return opLiteralValue == 0 || ((Number) bcLiteral.value()).longValue() % opLiteralValue != 0; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 349483116a0a8..e87006ec7ee09 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -294,7 +294,7 @@ private static boolean isInRange(DataType numericFieldDataType, DataType valueDa // Unsigned longs may be represented as BigInteger. decimalValue = new BigDecimal(bigIntValue); } else { - decimalValue = valueDataType.isRational() ? BigDecimal.valueOf(doubleValue) : BigDecimal.valueOf(value.longValue()); + decimalValue = valueDataType.isRationalNumber() ? BigDecimal.valueOf(doubleValue) : BigDecimal.valueOf(value.longValue()); } // Determine min/max for dataType. 
Use BigDecimals as doubles will have rounding errors for long/ulong. From 3d4e1136d6a2be3a38769e39e03ff94cdabb5caf Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 3 Jul 2024 19:37:13 +0100 Subject: [PATCH 169/216] Add Javadocs for `safeAwait()` etc. test methods (#110407) Seems worth adding a few words about what exactly these methods are for. --- .../org/elasticsearch/test/ESTestCase.java | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 4bb5fbd5e7031..92ced07174c23 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -2212,6 +2212,10 @@ protected static SecureRandom secureRandomFips(final byte[] seed) throws NoSuchA */ public static final TimeValue SAFE_AWAIT_TIMEOUT = TimeValue.timeValueSeconds(10); + /** + * Await on the given {@link CyclicBarrier} with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, preserving the thread's interrupt status flag + * and converting all exceptions into an {@link AssertionError} to trigger a test failure. + */ public static void safeAwait(CyclicBarrier barrier) { try { barrier.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); @@ -2223,6 +2227,10 @@ public static void safeAwait(CyclicBarrier barrier) { } } + /** + * Await on the given {@link CountDownLatch} with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, preserving the thread's interrupt status + * flag and asserting that the latch is indeed completed before the timeout. + */ public static void safeAwait(CountDownLatch countDownLatch) { try { assertTrue( @@ -2235,10 +2243,18 @@ public static void safeAwait(CountDownLatch countDownLatch) { } } + /** + * Acquire a single permit from the given {@link Semaphore}, with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, preserving the thread's + * interrupt status flag and asserting that the permit was successfully acquired. + */ public static void safeAcquire(Semaphore semaphore) { safeAcquire(1, semaphore); } + /** + * Acquire the specified number of permits from the given {@link Semaphore}, with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, preserving + * the thread's interrupt status flag and asserting that the permits were all successfully acquired. + */ public static void safeAcquire(int permits, Semaphore semaphore) { try { assertTrue( @@ -2251,12 +2267,24 @@ public static void safeAcquire(int permits, Semaphore semaphore) { } } + /** + * Wait for the successful completion of the given {@link SubscribableListener}, with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, + * preserving the thread's interrupt status flag and converting all exceptions into an {@link AssertionError} to trigger a test failure. + * + * @return The value with which the {@code listener} was completed. + */ public static T safeAwait(SubscribableListener listener) { final var future = new PlainActionFuture(); listener.addListener(future); return safeGet(future); } + /** + * Wait for the successful completion of the given {@link Future}, with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, preserving the + * thread's interrupt status flag and converting all exceptions into an {@link AssertionError} to trigger a test failure. + * + * @return The value with which the {@code future} was completed. 
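+     * @throws AssertionError if the future is not completed successfully within the timeout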
+ */ public static T safeGet(Future future) { try { return future.get(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); @@ -2270,6 +2298,13 @@ public static T safeGet(Future future) { } } + /** + * Wait for the exceptional completion of the given {@link SubscribableListener}, with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, + * preserving the thread's interrupt status flag and converting a successful completion, interrupt or timeout into an {@link + * AssertionError} to trigger a test failure. + * + * @return The exception with which the {@code listener} was completed exceptionally. + */ public static Exception safeAwaitFailure(SubscribableListener listener) { return safeAwait( SubscribableListener.newForked( @@ -2278,10 +2313,18 @@ public static Exception safeAwaitFailure(SubscribableListener listener) { ); } + /** + * Send the current thread to sleep for the given duration, asserting that the sleep is not interrupted but preserving the thread's + * interrupt status flag in any case. + */ public static void safeSleep(TimeValue timeValue) { safeSleep(timeValue.millis()); } + /** + * Send the current thread to sleep for the given number of milliseconds, asserting that the sleep is not interrupted but preserving the + * thread's interrupt status flag in any case. + */ public static void safeSleep(long millis) { try { Thread.sleep(millis); From 4cc59965c798f388032b89573acb2614490634a5 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 3 Jul 2024 20:40:23 +0200 Subject: [PATCH 170/216] Stop copying bucket list when doing desc sort (#110439) No need to copy here, the list is freshly allocated for us in 100% of cases here and we're not copying when sorting either. --- .../aggregations/bucket/histogram/InternalDateHistogram.java | 4 +--- .../aggregations/bucket/histogram/InternalHistogram.java | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 951ed222ffb77..4cfa7f449cf57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -497,9 +497,7 @@ public InternalAggregation get() { } if (InternalOrder.isKeyDesc(order)) { // we just need to reverse here... - List reverse = new ArrayList<>(reducedBuckets); - Collections.reverse(reverse); - reducedBuckets = reverse; + Collections.reverse(reducedBuckets); } else if (InternalOrder.isKeyAsc(order) == false) { // nothing to do when sorting by key ascending, as data is already sorted since shards return // sorted buckets and the merge-sort performed by reduceBuckets maintains order. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index 33548aa96b27f..2404de76fdd35 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -448,9 +448,7 @@ public InternalAggregation get() { } if (InternalOrder.isKeyDesc(order)) { // we just need to reverse here... 
- List reverse = new ArrayList<>(reducedBuckets); - Collections.reverse(reverse); - reducedBuckets = reverse; + Collections.reverse(reducedBuckets); } else if (InternalOrder.isKeyAsc(order) == false) { // nothing to do when sorting by key ascending, as data is already sorted since shards return // sorted buckets and the merge-sort performed by reduceBuckets maintains order. From b6e9860919270140e405e10527f3229e5e6a8c5f Mon Sep 17 00:00:00 2001 From: George Wallace Date: Wed, 3 Jul 2024 13:00:52 -0600 Subject: [PATCH 171/216] Update role-mapping-resources.asciidoc (#110441) made it clear that some characters need to be escaped properly Co-authored-by: Jan Doberstein --- .../reference/rest-api/security/role-mapping-resources.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/rest-api/security/role-mapping-resources.asciidoc b/docs/reference/rest-api/security/role-mapping-resources.asciidoc index 4c9ed582b674a..25703dc073e00 100644 --- a/docs/reference/rest-api/security/role-mapping-resources.asciidoc +++ b/docs/reference/rest-api/security/role-mapping-resources.asciidoc @@ -70,7 +70,7 @@ example, `"username": "jsmith"`. `groups`:: (array of strings) The groups to which the user belongs. For example, `"groups" : [ "cn=admin,ou=groups,dc=example,dc=com","cn=esusers,ou=groups,dc=example,dc=com ]`. `metadata`:: -(object) Additional metadata for the user. For example, `"metadata": { "cn": "John Smith" }`. +(object) Additional metadata for the user. This can include a variety of key-value pairs. When referencing metadata fields in role mapping rules, use the dot notation to specify the key within the metadata object. If the key contains special characters such as parentheses, dots, or spaces, you must escape these characters using backslashes (`\`). For example, `"metadata": { "cn": "John Smith" }`. `realm`:: (object) The realm that authenticated the user. The only field in this object is the realm name. For example, `"realm": { "name": "ldap1" }`. 
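As a quick illustration of the escaping rule described in the hunk above, the sketch below applies backslash escaping to a metadata key containing spaces and parentheses before it would be referenced as `metadata.<key>` in a mapping rule. The helper and its name are hypothetical, for illustration only; they are not part of Elasticsearch.

```
public class MetadataKeyEscaper {

    // Backslash-escape the characters the docs call out (parentheses, dots,
    // spaces) so the key can be referenced via dot notation in a mapping rule.
    static String escapeMetadataKey(String key) {
        return key.replaceAll("([(). ])", "\\\\$1");
    }

    public static void main(String[] args) {
        // prints: display\ name\ \(primary\)
        System.out.println(escapeMetadataKey("display name (primary)"));
    }
}
```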
From 89a1bd9c2da88160ba3d38ef572e98777f902e6a Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Wed, 3 Jul 2024 16:39:39 -0400 Subject: [PATCH 172/216] [Inference API] Prevent inference endpoints from being deleted if they are referenced by semantic text (#110399) Following on https://github.com/elastic/elasticsearch/pull/109123 --- docs/changelog/110399.yaml | 6 + .../org/elasticsearch/TransportVersions.java | 1 + .../action/DeleteInferenceEndpointAction.java | 29 +++- .../ml/utils/SemanticTextInfoExtractor.java | 48 +++++++ .../inference/InferenceBaseRestTest.java | 19 +++ .../xpack/inference/InferenceCrudIT.java | 88 ++++++++++++- ...ransportDeleteInferenceEndpointAction.java | 124 +++++++++++------- ..._text_query_inference_endpoint_changes.yml | 3 + 8 files changed, 266 insertions(+), 52 deletions(-) create mode 100644 docs/changelog/110399.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java diff --git a/docs/changelog/110399.yaml b/docs/changelog/110399.yaml new file mode 100644 index 0000000000000..9e04e2656809e --- /dev/null +++ b/docs/changelog/110399.yaml @@ -0,0 +1,6 @@ +pr: 110399 +summary: "[Inference API] Prevent inference endpoints from being deleted if they are\ + \ referenced by semantic text" +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 2004c6fda8ce5..fe87a055146d8 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -208,6 +208,7 @@ static TransportVersion def(int id) { public static final TransportVersion TEXT_SIMILARITY_RERANKER_RETRIEVER = def(8_699_00_0); public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED = def(8_700_00_0); public static final TransportVersion VERSIONED_MASTER_NODE_REQUESTS = def(8_701_00_0); + public static final TransportVersion ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS = def(8_702_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java index dfb77ccd49fc2..00debb5bf9366 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xcontent.XContentBuilder; @@ -105,10 +106,16 @@ public static class Response extends AcknowledgedResponse { private final String PIPELINE_IDS = "pipelines"; Set pipelineIds; + private final String REFERENCED_INDEXES = "indexes"; + Set indexes; + private final String DRY_RUN_MESSAGE = "error_message"; // error message only returned in response for dry_run + String dryRunMessage; - public Response(boolean acknowledged, Set pipelineIds) { + public Response(boolean acknowledged, Set pipelineIds, Set semanticTextIndexes, @Nullable String dryRunMessage) { super(acknowledged); this.pipelineIds = pipelineIds; + this.indexes = semanticTextIndexes; + this.dryRunMessage = dryRunMessage; } public Response(StreamInput in) throws IOException { @@ -118,6 +125,15 @@ public Response(StreamInput in) throws IOException { } else { pipelineIds = Set.of(); } + + if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS)) { + indexes = in.readCollectionAsSet(StreamInput::readString); + dryRunMessage = in.readOptionalString(); + } else { + indexes = Set.of(); + dryRunMessage = null; + } + } @Override @@ -126,12 +142,18 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { out.writeCollection(pipelineIds, StreamOutput::writeString); } + if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS)) { + out.writeCollection(indexes, StreamOutput::writeString); + out.writeOptionalString(dryRunMessage); + } } @Override protected void addCustomFields(XContentBuilder builder, Params params) throws IOException { super.addCustomFields(builder, params); builder.field(PIPELINE_IDS, pipelineIds); + builder.field(REFERENCED_INDEXES, indexes); + builder.field(DRY_RUN_MESSAGE, dryRunMessage); } @Override @@ -142,6 +164,11 @@ public String toString() { for (String entry : pipelineIds) { returnable.append(entry).append(", "); } + returnable.append(", semanticTextFieldsByIndex: "); + for (String entry : indexes) { + returnable.append(entry).append(", "); + } + returnable.append(", dryRunMessage: ").append(dryRunMessage); return returnable.toString(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java new file mode 100644 index 0000000000000..ed021baf31828 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * this file was contributed to by a Generative AI + */ + +package org.elasticsearch.xpack.core.ml.utils; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.cluster.metadata.Metadata; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +public class SemanticTextInfoExtractor { + private static final Logger logger = LogManager.getLogger(SemanticTextInfoExtractor.class); + + public static Set extractIndexesReferencingInferenceEndpoints(Metadata metadata, Set endpointIds) { + assert endpointIds.isEmpty() == false; + assert metadata != null; + + Set referenceIndices = new HashSet<>(); + + Map indices = metadata.indices(); + + indices.forEach((indexName, indexMetadata) -> { + if (indexMetadata.getInferenceFields() != null) { + Map inferenceFields = indexMetadata.getInferenceFields(); + if (inferenceFields.entrySet() + .stream() + .anyMatch( + entry -> entry.getValue().getInferenceId() != null && endpointIds.contains(entry.getValue().getInferenceId()) + )) { + referenceIndices.add(indexName); + } + } + }); + + return referenceIndices; + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 419869c0c4a5e..f30f2e8fe201a 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -126,6 +126,25 @@ protected void deleteModel(String modelId, TaskType taskType) throws IOException assertOkOrCreated(response); } + protected void putSemanticText(String endpointId, String indexName) throws IOException { + var request = new Request("PUT", Strings.format("%s", indexName)); + String body = Strings.format(""" + { + "mappings": { + "properties": { + "inference_field": { + "type": "semantic_text", + "inference_id": "%s" + } + } + } + } + """, endpointId); + request.setJsonEntity(body); + var response = client().performRequest(request); + assertOkOrCreated(response); + } + protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); return putRequest(endpoint, modelConfig); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 75e392b6d155f..034457ec28a79 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -16,6 +16,7 @@ import java.io.IOException; import java.util.List; 
+import java.util.Set; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; @@ -124,14 +125,15 @@ public void testDeleteEndpointWhileReferencedByPipeline() throws IOException { putPipeline(pipelineId, endpointId); { + var errorString = new StringBuilder().append("Inference endpoint ") + .append(endpointId) + .append(" is referenced by pipelines: ") + .append(Set.of(pipelineId)) + .append(". ") + .append("Ensure that no pipelines are using this inference endpoint, ") + .append("or use force to ignore this warning and delete the inference endpoint."); var e = expectThrows(ResponseException.class, () -> deleteModel(endpointId)); - assertThat( - e.getMessage(), - containsString( - "Inference endpoint endpoint_referenced_by_pipeline is referenced by pipelines and cannot be deleted. " - + "Use `force` to delete it anyway, or use `dry_run` to list the pipelines that reference it." - ) - ); + assertThat(e.getMessage(), containsString(errorString.toString())); } { var response = deleteModel(endpointId, "dry_run=true"); @@ -146,4 +148,76 @@ public void testDeleteEndpointWhileReferencedByPipeline() throws IOException { } deletePipeline(pipelineId); } + + public void testDeleteEndpointWhileReferencedBySemanticText() throws IOException { + String endpointId = "endpoint_referenced_by_semantic_text"; + putModel(endpointId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + String indexName = randomAlphaOfLength(10).toLowerCase(); + putSemanticText(endpointId, indexName); + { + + var errorString = new StringBuilder().append(" Inference endpoint ") + .append(endpointId) + .append(" is being used in the mapping for indexes: ") + .append(Set.of(indexName)) + .append(". ") + .append("Ensure that no index mappings are using this inference endpoint, ") + .append("or use force to ignore this warning and delete the inference endpoint."); + var e = expectThrows(ResponseException.class, () -> deleteModel(endpointId)); + assertThat(e.getMessage(), containsString(errorString.toString())); + } + { + var response = deleteModel(endpointId, "dry_run=true"); + var entityString = EntityUtils.toString(response.getEntity()); + assertThat(entityString, containsString("\"acknowledged\":false")); + assertThat(entityString, containsString(indexName)); + } + { + var response = deleteModel(endpointId, "force=true"); + var entityString = EntityUtils.toString(response.getEntity()); + assertThat(entityString, containsString("\"acknowledged\":true")); + } + } + + public void testDeleteEndpointWhileReferencedBySemanticTextAndPipeline() throws IOException { + String endpointId = "endpoint_referenced_by_semantic_text"; + putModel(endpointId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + String indexName = randomAlphaOfLength(10).toLowerCase(); + putSemanticText(endpointId, indexName); + var pipelineId = "pipeline_referencing_model"; + putPipeline(pipelineId, endpointId); + { + + var errorString = new StringBuilder().append("Inference endpoint ") + .append(endpointId) + .append(" is referenced by pipelines: ") + .append(Set.of(pipelineId)) + .append(". ") + .append("Ensure that no pipelines are using this inference endpoint, ") + .append("or use force to ignore this warning and delete the inference endpoint.") + .append(" Inference endpoint ") + .append(endpointId) + .append(" is being used in the mapping for indexes: ") + .append(Set.of(indexName)) + .append(". 
") + .append("Ensure that no index mappings are using this inference endpoint, ") + .append("or use force to ignore this warning and delete the inference endpoint."); + + var e = expectThrows(ResponseException.class, () -> deleteModel(endpointId)); + assertThat(e.getMessage(), containsString(errorString.toString())); + } + { + var response = deleteModel(endpointId, "dry_run=true"); + var entityString = EntityUtils.toString(response.getEntity()); + assertThat(entityString, containsString("\"acknowledged\":false")); + assertThat(entityString, containsString(indexName)); + assertThat(entityString, containsString(pipelineId)); + } + { + var response = deleteModel(endpointId, "force=true"); + var entityString = EntityUtils.toString(response.getEntity()); + assertThat(entityString, containsString("\"acknowledged\":true")); + } + deletePipeline(pipelineId); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java index 07d5e1e618578..9a84f572a6d60 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file was contributed to by a Generative AI */ package org.elasticsearch.xpack.inference.action; @@ -18,12 +20,10 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; -import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -35,6 +35,8 @@ import java.util.Set; +import static org.elasticsearch.xpack.core.ml.utils.SemanticTextInfoExtractor.extractIndexesReferencingInferenceEndpoints; + public class TransportDeleteInferenceEndpointAction extends TransportMasterNodeAction< DeleteInferenceEndpointAction.Request, DeleteInferenceEndpointAction.Response> { @@ -89,17 +91,15 @@ protected void masterOperation( } if (request.isDryRun()) { - masterListener.onResponse( - new DeleteInferenceEndpointAction.Response( - false, - InferenceProcessorInfoExtractor.pipelineIdsForResource(state, Set.of(request.getInferenceEndpointId())) - ) - ); + handleDryRun(request, state, masterListener); return; - } else if (request.isForceDelete() == false - && endpointIsReferencedInPipelines(state, request.getInferenceEndpointId(), listener)) { + } else if (request.isForceDelete() == false) { + var errorString = endpointIsReferencedInPipelinesOrIndexes(state, request.getInferenceEndpointId()); + if (errorString != null) { + listener.onFailure(new ElasticsearchStatusException(errorString, RestStatus.CONFLICT)); return; } + } var service = serviceRegistry.getService(unparsedModel.service()); if 
(service.isPresent()) { @@ -126,47 +126,83 @@ && endpointIsReferencedInPipelines(state, request.getInferenceEndpointId(), list }) .addListener( masterListener.delegateFailure( - (l3, didDeleteModel) -> masterListener.onResponse(new DeleteInferenceEndpointAction.Response(didDeleteModel, Set.of())) + (l3, didDeleteModel) -> masterListener.onResponse( + new DeleteInferenceEndpointAction.Response(didDeleteModel, Set.of(), Set.of(), null) + ) ) ); } - private static boolean endpointIsReferencedInPipelines( - final ClusterState state, - final String inferenceEndpointId, - ActionListener listener + private static void handleDryRun( + DeleteInferenceEndpointAction.Request request, + ClusterState state, + ActionListener masterListener ) { - Metadata metadata = state.getMetadata(); - if (metadata == null) { - listener.onFailure( - new ElasticsearchStatusException( - " Could not determine if the endpoint is referenced in a pipeline as cluster state metadata was unexpectedly null. " - + "Use `force` to delete it anyway", - RestStatus.INTERNAL_SERVER_ERROR - ) - ); - // Unsure why the ClusterState metadata would ever be null, but in this case it seems safer to assume the endpoint is referenced - return true; + Set pipelines = InferenceProcessorInfoExtractor.pipelineIdsForResource(state, Set.of(request.getInferenceEndpointId())); + + Set indexesReferencedBySemanticText = extractIndexesReferencingInferenceEndpoints( + state.getMetadata(), + Set.of(request.getInferenceEndpointId()) + ); + + masterListener.onResponse( + new DeleteInferenceEndpointAction.Response( + false, + pipelines, + indexesReferencedBySemanticText, + buildErrorString(request.getInferenceEndpointId(), pipelines, indexesReferencedBySemanticText) + ) + ); + } + + private static String endpointIsReferencedInPipelinesOrIndexes(final ClusterState state, final String inferenceEndpointId) { + + var pipelines = endpointIsReferencedInPipelines(state, inferenceEndpointId); + var indexes = endpointIsReferencedInIndex(state, inferenceEndpointId); + + if (pipelines.isEmpty() == false || indexes.isEmpty() == false) { + return buildErrorString(inferenceEndpointId, pipelines, indexes); } - IngestMetadata ingestMetadata = metadata.custom(IngestMetadata.TYPE); - if (ingestMetadata == null) { - logger.debug("No ingest metadata found in cluster state while attempting to delete inference endpoint"); - } else { - Set modelIdsReferencedByPipelines = InferenceProcessorInfoExtractor.getModelIdsFromInferenceProcessors(ingestMetadata); - if (modelIdsReferencedByPipelines.contains(inferenceEndpointId)) { - listener.onFailure( - new ElasticsearchStatusException( - "Inference endpoint " - + inferenceEndpointId - + " is referenced by pipelines and cannot be deleted. " - + "Use `force` to delete it anyway, or use `dry_run` to list the pipelines that reference it.", - RestStatus.CONFLICT - ) - ); - return true; - } + return null; + } + + private static String buildErrorString(String inferenceEndpointId, Set pipelines, Set indexes) { + StringBuilder errorString = new StringBuilder(); + + if (pipelines.isEmpty() == false) { + errorString.append("Inference endpoint ") + .append(inferenceEndpointId) + .append(" is referenced by pipelines: ") + .append(pipelines) + .append(". 
") + .append("Ensure that no pipelines are using this inference endpoint, ") + .append("or use force to ignore this warning and delete the inference endpoint."); } - return false; + + if (indexes.isEmpty() == false) { + errorString.append(" Inference endpoint ") + .append(inferenceEndpointId) + .append(" is being used in the mapping for indexes: ") + .append(indexes) + .append(". ") + .append("Ensure that no index mappings are using this inference endpoint, ") + .append("or use force to ignore this warning and delete the inference endpoint."); + } + + return errorString.toString(); + } + + private static Set endpointIsReferencedInIndex(final ClusterState state, final String inferenceEndpointId) { + Set indexes = extractIndexesReferencingInferenceEndpoints(state.getMetadata(), Set.of(inferenceEndpointId)); + return indexes; + } + + private static Set endpointIsReferencedInPipelines(final ClusterState state, final String inferenceEndpointId) { + Set modelIdsReferencedByPipelines = InferenceProcessorInfoExtractor.pipelineIdsForResource( + state, + Set.of(inferenceEndpointId) + ); + return modelIdsReferencedByPipelines; } @Override diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml index fd656c9d5d950..f6a7073914609 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml @@ -81,6 +81,7 @@ setup: - do: inference.delete: inference_id: sparse-inference-id + force: true - do: inference.put: @@ -119,6 +120,7 @@ setup: - do: inference.delete: inference_id: dense-inference-id + force: true - do: inference.put: @@ -155,6 +157,7 @@ setup: - do: inference.delete: inference_id: dense-inference-id + force: true - do: inference.put: From a671ba73718149cba8182e68abfbc4f9ef54afc2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 3 Jul 2024 17:14:50 -0400 Subject: [PATCH 173/216] ESQL: Fix running tests in intellij (#110444) When we added arrow support we needed to add a security permission to hack around their initialization code. That confused intellij running tests. This removes the confusion by manually resolving the location. 
--- .../esql/src/main/plugin-metadata/plugin-security.codebases | 1 + 1 file changed, 1 insertion(+) create mode 100644 x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.codebases diff --git a/x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.codebases b/x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.codebases new file mode 100644 index 0000000000000..ecae5129b3563 --- /dev/null +++ b/x-pack/plugin/esql/src/main/plugin-metadata/plugin-security.codebases @@ -0,0 +1 @@ +arrow: org.elasticsearch.xpack.esql.arrow.AllocationManagerShim From 1be0f2b5aedce0294efffdf88df97f9771286e33 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Wed, 3 Jul 2024 17:27:12 -0400 Subject: [PATCH 174/216] =?UTF-8?q?Revert=20"[Inference=20API]=20Prevent?= =?UTF-8?q?=20inference=20endpoints=20from=20being=20deleted=20if=20the?= =?UTF-8?q?=E2=80=A6"=20(#110446)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 89a1bd9c2da88160ba3d38ef572e98777f902e6a. --- docs/changelog/110399.yaml | 6 - .../org/elasticsearch/TransportVersions.java | 1 - .../action/DeleteInferenceEndpointAction.java | 29 +--- .../ml/utils/SemanticTextInfoExtractor.java | 48 ------- .../inference/InferenceBaseRestTest.java | 19 --- .../xpack/inference/InferenceCrudIT.java | 88 +------------ ...ransportDeleteInferenceEndpointAction.java | 124 +++++++----------- ..._text_query_inference_endpoint_changes.yml | 3 - 8 files changed, 52 insertions(+), 266 deletions(-) delete mode 100644 docs/changelog/110399.yaml delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java diff --git a/docs/changelog/110399.yaml b/docs/changelog/110399.yaml deleted file mode 100644 index 9e04e2656809e..0000000000000 --- a/docs/changelog/110399.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 110399 -summary: "[Inference API] Prevent inference endpoints from being deleted if they are\ - \ referenced by semantic text" -area: Machine Learning -type: enhancement -issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index fe87a055146d8..2004c6fda8ce5 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -208,7 +208,6 @@ static TransportVersion def(int id) { public static final TransportVersion TEXT_SIMILARITY_RERANKER_RETRIEVER = def(8_699_00_0); public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED = def(8_700_00_0); public static final TransportVersion VERSIONED_MASTER_NODE_REQUESTS = def(8_701_00_0); - public static final TransportVersion ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS = def(8_702_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java index 00debb5bf9366..dfb77ccd49fc2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceEndpointAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xcontent.XContentBuilder; @@ -106,16 +105,10 @@ public static class Response extends AcknowledgedResponse { private final String PIPELINE_IDS = "pipelines"; Set pipelineIds; - private final String REFERENCED_INDEXES = "indexes"; - Set indexes; - private final String DRY_RUN_MESSAGE = "error_message"; // error message only returned in response for dry_run - String dryRunMessage; - public Response(boolean acknowledged, Set pipelineIds, Set semanticTextIndexes, @Nullable String dryRunMessage) { + public Response(boolean acknowledged, Set pipelineIds) { super(acknowledged); this.pipelineIds = pipelineIds; - this.indexes = semanticTextIndexes; - this.dryRunMessage = dryRunMessage; } public Response(StreamInput in) throws IOException { @@ -125,15 +118,6 @@ public Response(StreamInput in) throws IOException { } else { pipelineIds = Set.of(); } - - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS)) { - indexes = in.readCollectionAsSet(StreamInput::readString); - dryRunMessage = in.readOptionalString(); - } else { - indexes = Set.of(); - dryRunMessage = null; - } - } @Override @@ -142,18 +126,12 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_ENHANCE_DELETE_ENDPOINT)) { out.writeCollection(pipelineIds, StreamOutput::writeString); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_DONT_DELETE_WHEN_SEMANTIC_TEXT_EXISTS)) { - out.writeCollection(indexes, StreamOutput::writeString); - out.writeOptionalString(dryRunMessage); - } } @Override protected void addCustomFields(XContentBuilder builder, Params params) throws IOException { super.addCustomFields(builder, params); builder.field(PIPELINE_IDS, pipelineIds); - builder.field(REFERENCED_INDEXES, indexes); - builder.field(DRY_RUN_MESSAGE, dryRunMessage); } @Override @@ -164,11 +142,6 @@ public String toString() { for (String entry : pipelineIds) { returnable.append(entry).append(", "); } - returnable.append(", semanticTextFieldsByIndex: "); - for (String entry : indexes) { - returnable.append(entry).append(", "); - } - returnable.append(", dryRunMessage: ").append(dryRunMessage); return returnable.toString(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java deleted file mode 100644 index ed021baf31828..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/SemanticTextInfoExtractor.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - * - * this file was contributed to by a Generative AI - */ - -package org.elasticsearch.xpack.core.ml.utils; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; -import org.elasticsearch.cluster.metadata.Metadata; - -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -public class SemanticTextInfoExtractor { - private static final Logger logger = LogManager.getLogger(SemanticTextInfoExtractor.class); - - public static Set extractIndexesReferencingInferenceEndpoints(Metadata metadata, Set endpointIds) { - assert endpointIds.isEmpty() == false; - assert metadata != null; - - Set referenceIndices = new HashSet<>(); - - Map indices = metadata.indices(); - - indices.forEach((indexName, indexMetadata) -> { - if (indexMetadata.getInferenceFields() != null) { - Map inferenceFields = indexMetadata.getInferenceFields(); - if (inferenceFields.entrySet() - .stream() - .anyMatch( - entry -> entry.getValue().getInferenceId() != null && endpointIds.contains(entry.getValue().getInferenceId()) - )) { - referenceIndices.add(indexName); - } - } - }); - - return referenceIndices; - } -} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index f30f2e8fe201a..419869c0c4a5e 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -126,25 +126,6 @@ protected void deleteModel(String modelId, TaskType taskType) throws IOException assertOkOrCreated(response); } - protected void putSemanticText(String endpointId, String indexName) throws IOException { - var request = new Request("PUT", Strings.format("%s", indexName)); - String body = Strings.format(""" - { - "mappings": { - "properties": { - "inference_field": { - "type": "semantic_text", - "inference_id": "%s" - } - } - } - } - """, endpointId); - request.setJsonEntity(body); - var response = client().performRequest(request); - assertOkOrCreated(response); - } - protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); return putRequest(endpoint, modelConfig); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 034457ec28a79..75e392b6d155f 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -16,7 +16,6 @@ import java.io.IOException; import java.util.List; 
-import java.util.Set; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; @@ -125,15 +124,14 @@ public void testDeleteEndpointWhileReferencedByPipeline() throws IOException { putPipeline(pipelineId, endpointId); { - var errorString = new StringBuilder().append("Inference endpoint ") - .append(endpointId) - .append(" is referenced by pipelines: ") - .append(Set.of(pipelineId)) - .append(". ") - .append("Ensure that no pipelines are using this inference endpoint, ") - .append("or use force to ignore this warning and delete the inference endpoint."); var e = expectThrows(ResponseException.class, () -> deleteModel(endpointId)); - assertThat(e.getMessage(), containsString(errorString.toString())); + assertThat( + e.getMessage(), + containsString( + "Inference endpoint endpoint_referenced_by_pipeline is referenced by pipelines and cannot be deleted. " + + "Use `force` to delete it anyway, or use `dry_run` to list the pipelines that reference it." + ) + ); } { var response = deleteModel(endpointId, "dry_run=true"); @@ -148,76 +146,4 @@ public void testDeleteEndpointWhileReferencedByPipeline() throws IOException { } deletePipeline(pipelineId); } - - public void testDeleteEndpointWhileReferencedBySemanticText() throws IOException { - String endpointId = "endpoint_referenced_by_semantic_text"; - putModel(endpointId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); - String indexName = randomAlphaOfLength(10).toLowerCase(); - putSemanticText(endpointId, indexName); - { - - var errorString = new StringBuilder().append(" Inference endpoint ") - .append(endpointId) - .append(" is being used in the mapping for indexes: ") - .append(Set.of(indexName)) - .append(". ") - .append("Ensure that no index mappings are using this inference endpoint, ") - .append("or use force to ignore this warning and delete the inference endpoint."); - var e = expectThrows(ResponseException.class, () -> deleteModel(endpointId)); - assertThat(e.getMessage(), containsString(errorString.toString())); - } - { - var response = deleteModel(endpointId, "dry_run=true"); - var entityString = EntityUtils.toString(response.getEntity()); - assertThat(entityString, containsString("\"acknowledged\":false")); - assertThat(entityString, containsString(indexName)); - } - { - var response = deleteModel(endpointId, "force=true"); - var entityString = EntityUtils.toString(response.getEntity()); - assertThat(entityString, containsString("\"acknowledged\":true")); - } - } - - public void testDeleteEndpointWhileReferencedBySemanticTextAndPipeline() throws IOException { - String endpointId = "endpoint_referenced_by_semantic_text"; - putModel(endpointId, mockSparseServiceModelConfig(), TaskType.SPARSE_EMBEDDING); - String indexName = randomAlphaOfLength(10).toLowerCase(); - putSemanticText(endpointId, indexName); - var pipelineId = "pipeline_referencing_model"; - putPipeline(pipelineId, endpointId); - { - - var errorString = new StringBuilder().append("Inference endpoint ") - .append(endpointId) - .append(" is referenced by pipelines: ") - .append(Set.of(pipelineId)) - .append(". ") - .append("Ensure that no pipelines are using this inference endpoint, ") - .append("or use force to ignore this warning and delete the inference endpoint.") - .append(" Inference endpoint ") - .append(endpointId) - .append(" is being used in the mapping for indexes: ") - .append(Set.of(indexName)) - .append(". 
") - .append("Ensure that no index mappings are using this inference endpoint, ") - .append("or use force to ignore this warning and delete the inference endpoint."); - - var e = expectThrows(ResponseException.class, () -> deleteModel(endpointId)); - assertThat(e.getMessage(), containsString(errorString.toString())); - } - { - var response = deleteModel(endpointId, "dry_run=true"); - var entityString = EntityUtils.toString(response.getEntity()); - assertThat(entityString, containsString("\"acknowledged\":false")); - assertThat(entityString, containsString(indexName)); - assertThat(entityString, containsString(pipelineId)); - } - { - var response = deleteModel(endpointId, "force=true"); - var entityString = EntityUtils.toString(response.getEntity()); - assertThat(entityString, containsString("\"acknowledged\":true")); - } - deletePipeline(pipelineId); - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java index 9a84f572a6d60..07d5e1e618578 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceEndpointAction.java @@ -3,8 +3,6 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. - * - * this file was contributed to by a Generative AI */ package org.elasticsearch.xpack.inference.action; @@ -20,10 +18,12 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -35,8 +35,6 @@ import java.util.Set; -import static org.elasticsearch.xpack.core.ml.utils.SemanticTextInfoExtractor.extractIndexesReferencingInferenceEndpoints; - public class TransportDeleteInferenceEndpointAction extends TransportMasterNodeAction< DeleteInferenceEndpointAction.Request, DeleteInferenceEndpointAction.Response> { @@ -91,15 +89,17 @@ protected void masterOperation( } if (request.isDryRun()) { - handleDryRun(request, state, masterListener); + masterListener.onResponse( + new DeleteInferenceEndpointAction.Response( + false, + InferenceProcessorInfoExtractor.pipelineIdsForResource(state, Set.of(request.getInferenceEndpointId())) + ) + ); return; - } else if (request.isForceDelete() == false) { - var errorString = endpointIsReferencedInPipelinesOrIndexes(state, request.getInferenceEndpointId()); - if (errorString != null) { - listener.onFailure(new ElasticsearchStatusException(errorString, RestStatus.CONFLICT)); + } else if (request.isForceDelete() == false + && endpointIsReferencedInPipelines(state, request.getInferenceEndpointId(), listener)) { return; } - } var service = serviceRegistry.getService(unparsedModel.service()); if 
(service.isPresent()) { @@ -126,83 +126,47 @@ protected void masterOperation( }) .addListener( masterListener.delegateFailure( - (l3, didDeleteModel) -> masterListener.onResponse( - new DeleteInferenceEndpointAction.Response(didDeleteModel, Set.of(), Set.of(), null) - ) + (l3, didDeleteModel) -> masterListener.onResponse(new DeleteInferenceEndpointAction.Response(didDeleteModel, Set.of())) ) ); } - private static void handleDryRun( - DeleteInferenceEndpointAction.Request request, - ClusterState state, - ActionListener masterListener + private static boolean endpointIsReferencedInPipelines( + final ClusterState state, + final String inferenceEndpointId, + ActionListener listener ) { - Set pipelines = InferenceProcessorInfoExtractor.pipelineIdsForResource(state, Set.of(request.getInferenceEndpointId())); - - Set indexesReferencedBySemanticText = extractIndexesReferencingInferenceEndpoints( - state.getMetadata(), - Set.of(request.getInferenceEndpointId()) - ); - - masterListener.onResponse( - new DeleteInferenceEndpointAction.Response( - false, - pipelines, - indexesReferencedBySemanticText, - buildErrorString(request.getInferenceEndpointId(), pipelines, indexesReferencedBySemanticText) - ) - ); - } - - private static String endpointIsReferencedInPipelinesOrIndexes(final ClusterState state, final String inferenceEndpointId) { - - var pipelines = endpointIsReferencedInPipelines(state, inferenceEndpointId); - var indexes = endpointIsReferencedInIndex(state, inferenceEndpointId); - - if (pipelines.isEmpty() == false || indexes.isEmpty() == false) { - return buildErrorString(inferenceEndpointId, pipelines, indexes); - } - return null; - } - - private static String buildErrorString(String inferenceEndpointId, Set pipelines, Set indexes) { - StringBuilder errorString = new StringBuilder(); - - if (pipelines.isEmpty() == false) { - errorString.append("Inference endpoint ") - .append(inferenceEndpointId) - .append(" is referenced by pipelines: ") - .append(pipelines) - .append(". ") - .append("Ensure that no pipelines are using this inference endpoint, ") - .append("or use force to ignore this warning and delete the inference endpoint."); + Metadata metadata = state.getMetadata(); + if (metadata == null) { + listener.onFailure( + new ElasticsearchStatusException( + " Could not determine if the endpoint is referenced in a pipeline as cluster state metadata was unexpectedly null. " + + "Use `force` to delete it anyway", + RestStatus.INTERNAL_SERVER_ERROR + ) + ); + // Unsure why the ClusterState metadata would ever be null, but in this case it seems safer to assume the endpoint is referenced + return true; } - - if (indexes.isEmpty() == false) { - errorString.append(" Inference endpoint ") - .append(inferenceEndpointId) - .append(" is being used in the mapping for indexes: ") - .append(indexes) - .append(". 
") - .append("Ensure that no index mappings are using this inference endpoint, ") - .append("or use force to ignore this warning and delete the inference endpoint."); + IngestMetadata ingestMetadata = metadata.custom(IngestMetadata.TYPE); + if (ingestMetadata == null) { + logger.debug("No ingest metadata found in cluster state while attempting to delete inference endpoint"); + } else { + Set modelIdsReferencedByPipelines = InferenceProcessorInfoExtractor.getModelIdsFromInferenceProcessors(ingestMetadata); + if (modelIdsReferencedByPipelines.contains(inferenceEndpointId)) { + listener.onFailure( + new ElasticsearchStatusException( + "Inference endpoint " + + inferenceEndpointId + + " is referenced by pipelines and cannot be deleted. " + + "Use `force` to delete it anyway, or use `dry_run` to list the pipelines that reference it.", + RestStatus.CONFLICT + ) + ); + return true; + } } - - return errorString.toString(); - } - - private static Set endpointIsReferencedInIndex(final ClusterState state, final String inferenceEndpointId) { - Set indexes = extractIndexesReferencingInferenceEndpoints(state.getMetadata(), Set.of(inferenceEndpointId)); - return indexes; - } - - private static Set endpointIsReferencedInPipelines(final ClusterState state, final String inferenceEndpointId) { - Set modelIdsReferencedByPipelines = InferenceProcessorInfoExtractor.pipelineIdsForResource( - state, - Set.of(inferenceEndpointId) - ); - return modelIdsReferencedByPipelines; + return false; } @Override diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml index f6a7073914609..fd656c9d5d950 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/50_semantic_text_query_inference_endpoint_changes.yml @@ -81,7 +81,6 @@ setup: - do: inference.delete: inference_id: sparse-inference-id - force: true - do: inference.put: @@ -120,7 +119,6 @@ setup: - do: inference.delete: inference_id: dense-inference-id - force: true - do: inference.put: @@ -157,7 +155,6 @@ setup: - do: inference.delete: inference_id: dense-inference-id - force: true - do: inference.put: From 7a8a7c06289fee0f3544888498ec6852b7911e7a Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Thu, 4 Jul 2024 12:26:56 +1000 Subject: [PATCH 175/216] Improve mechanism for extracting the result of a PlainActionFuture (#110019) Closes #108125 --- docs/changelog/110019.yaml | 6 +++ .../action/support/PlainActionFuture.java | 46 ++++--------------- .../indices/TimestampFieldMapperService.java | 10 +++- ...ransportIndicesShardStoresActionTests.java | 10 ++-- .../support/PlainActionFutureTests.java | 13 ++---- .../CancellableSingleObjectCacheTests.java | 22 ++++----- .../concurrent/ListenableFutureTests.java | 4 +- .../snapshots/SnapshotResiliencyTests.java | 3 +- .../cluster/ESAllocationTestCase.java | 2 +- .../org/elasticsearch/test/ESTestCase.java | 15 ++++++ .../ProgressListenableActionFuture.java | 21 ++++++++- .../support/SecondaryAuthenticatorTests.java | 21 +++++++-- 12 files changed, 101 insertions(+), 72 deletions(-) create mode 100644 docs/changelog/110019.yaml diff --git a/docs/changelog/110019.yaml b/docs/changelog/110019.yaml new file mode 100644 index 
0000000000000..632e79008d351 --- /dev/null +++ b/docs/changelog/110019.yaml @@ -0,0 +1,6 @@ +pr: 110019 +summary: Improve mechanism for extracting the result of a `PlainActionFuture` +area: Distributed +type: enhancement +issues: + - 108125 diff --git a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java index 06b5fa4ffd0e8..47fcd43f0d238 100644 --- a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java @@ -178,32 +178,19 @@ public T actionGet(long timeout, TimeUnit unit) { * Return the result of this future, similarly to {@link FutureUtils#get} with a zero timeout except that this method ignores the * interrupted status of the calling thread. *
<p>
    - * As with {@link FutureUtils#get}, if the future completed exceptionally with a {@link RuntimeException} then this method throws that - * exception, but if the future completed exceptionally with an exception that is not a {@link RuntimeException} then this method throws - * an {@link UncategorizedExecutionException} whose cause is an {@link ExecutionException} whose cause is the completing exception. + * If the future completed exceptionally then this method throws an {@link ExecutionException} whose cause is the completing exception. *
<p>
    * It is not valid to call this method if the future is incomplete. * * @return the result of this future, if it has been completed successfully. - * @throws RuntimeException if this future was completed exceptionally, wrapping checked exceptions as described above. + * @throws ExecutionException if this future was completed exceptionally. * @throws CancellationException if this future was cancelled. + * @throws IllegalStateException if this future is incomplete. */ - public T result() { + public T result() throws ExecutionException { return sync.result(); } - /** - * Return the result of this future, if it has been completed successfully, or unwrap and throw the exception with which it was - * completed exceptionally. It is not valid to call this method if the future is incomplete. - */ - public T actionResult() { - try { - return result(); - } catch (ElasticsearchException e) { - throw unwrapEsException(e); - } - } - /** *
<p>
    Following the contract of {@link AbstractQueuedSynchronizer} we create a * private subclass to hold the synchronizer. This synchronizer is used to @@ -217,7 +204,7 @@ public T actionResult() { * RUNNING to COMPLETING, that thread will then set the result of the * computation, and only then transition to COMPLETED or CANCELLED. *
<p>
    - * We don't use the integer argument passed between acquire methods so we + * We don't use the integer argument passed between acquire methods, so we * pass around a -1 everywhere. */ static final class Sync extends AbstractQueuedSynchronizer { @@ -302,24 +289,9 @@ private V getValue() throws CancellationException, ExecutionException { } } - V result() { - final int state = getState(); - switch (state) { - case COMPLETED: - if (exception instanceof RuntimeException runtimeException) { - throw runtimeException; - } else if (exception != null) { - throw new UncategorizedExecutionException("Failed execution", new ExecutionException(exception)); - } else { - return value; - } - case CANCELLED: - throw new CancellationException("Task was cancelled."); - default: - final var message = "Error, synchronizer in invalid state: " + state; - assert false : message; - throw new IllegalStateException(message); - } + V result() throws CancellationException, ExecutionException { + assert isDone() : "Error, synchronizer in invalid state: " + getState(); + return getValue(); } /** @@ -358,7 +330,7 @@ boolean cancel() { } /** - * Implementation of completing a task. Either {@code v} or {@code t} will + * Implementation of completing a task. Either {@code v} or {@code e} will * be set but not both. The {@code finalState} is the state to change to * from {@link #RUNNING}. If the state is not in the RUNNING state we * return {@code false} after waiting for the state to be set to a valid diff --git a/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java b/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java index 15e409df552bd..4caeaef6514e5 100644 --- a/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java +++ b/server/src/main/java/org/elasticsearch/indices/TimestampFieldMapperService.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; @@ -33,6 +34,7 @@ import java.util.Map; import java.util.Objects; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; @@ -172,8 +174,12 @@ public DateFieldMapper.DateFieldType getTimestampFieldType(Index index) { if (future == null || future.isDone() == false) { return null; } - // call non-blocking actionResult() as we could be on a network or scheduler thread which we must not block - return future.actionResult(); + // call non-blocking result() as we could be on a network or scheduler thread which we must not block + try { + return future.result(); + } catch (ExecutionException e) { + throw new UncategorizedExecutionException("An error occurred fetching timestamp field type for " + index, e); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java index ffe42722b308d..a51e9b86858d7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java @@ -46,6 +46,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.stream.Collectors; @@ -68,7 +69,7 @@ void runTest() { future ); - final var response = future.result(); + final var response = safeGet(future); assertThat(response.getFailures(), empty()); assertThat(response.getStoreStatuses(), anEmptyMap()); assertThat(shardsWithFailures, empty()); @@ -138,7 +139,7 @@ void runTest() { listExpected = false; assertFalse(future.isDone()); deterministicTaskQueue.runAllTasks(); - expectThrows(TaskCancelledException.class, future::result); + expectThrows(ExecutionException.class, TaskCancelledException.class, future::result); } }); } @@ -159,7 +160,10 @@ void runTest() { failOneRequest = true; deterministicTaskQueue.runAllTasks(); assertFalse(failOneRequest); - assertEquals("simulated", expectThrows(ElasticsearchException.class, future::result).getMessage()); + assertEquals( + "simulated", + expectThrows(ExecutionException.class, ElasticsearchException.class, future::result).getMessage() + ); } }); } diff --git a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java index aa9456eaaa2e9..4784a42014825 100644 --- a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java @@ -73,7 +73,6 @@ public void testNoResult() { assumeTrue("assertions required for this test", Assertions.ENABLED); final var future = new PlainActionFuture<>(); expectThrows(AssertionError.class, future::result); - expectThrows(AssertionError.class, future::actionResult); } public void testUnwrapException() { @@ -93,19 +92,17 @@ private void checkUnwrap(Exception exception, Class actionG assertEquals(actionGetException, expectThrows(RuntimeException.class, future::actionGet).getClass()); assertEquals(actionGetException, expectThrows(RuntimeException.class, () -> future.actionGet(10, TimeUnit.SECONDS)).getClass()); - assertEquals(actionGetException, expectThrows(RuntimeException.class, future::actionResult).getClass()); - assertEquals(actionGetException, expectThrows(RuntimeException.class, expectIgnoresInterrupt(future::actionResult)).getClass()); assertEquals(getException, expectThrows(ExecutionException.class, future::get).getCause().getClass()); assertEquals(getException, expectThrows(ExecutionException.class, () -> future.get(10, TimeUnit.SECONDS)).getCause().getClass()); if (exception instanceof RuntimeException) { - assertEquals(getException, expectThrows(Exception.class, future::result).getClass()); - assertEquals(getException, expectThrows(Exception.class, expectIgnoresInterrupt(future::result)).getClass()); + expectThrows(ExecutionException.class, getException, future::result); + expectThrows(ExecutionException.class, getException, expectIgnoresInterrupt(future::result)); assertEquals(getException, expectThrows(Exception.class, () -> FutureUtils.get(future)).getClass()); assertEquals(getException, expectThrows(Exception.class, () -> FutureUtils.get(future, 10, TimeUnit.SECONDS)).getClass()); } else { - assertEquals(getException, expectThrowsWrapped(future::result).getClass()); - assertEquals(getException, 
expectThrowsWrapped(expectIgnoresInterrupt(future::result)).getClass()); + expectThrows(ExecutionException.class, getException, future::result); + expectThrows(ExecutionException.class, getException, expectIgnoresInterrupt(future::result)); assertEquals(getException, expectThrowsWrapped(() -> FutureUtils.get(future)).getClass()); assertEquals(getException, expectThrowsWrapped(() -> FutureUtils.get(future, 10, TimeUnit.SECONDS)).getClass()); } @@ -129,12 +126,10 @@ public void testCancelException() { assertCancellation(() -> future.get(10, TimeUnit.SECONDS)); assertCancellation(() -> future.actionGet(10, TimeUnit.SECONDS)); assertCancellation(future::result); - assertCancellation(future::actionResult); try { Thread.currentThread().interrupt(); assertCancellation(future::result); - assertCancellation(future::actionResult); } finally { assertTrue(Thread.interrupted()); } diff --git a/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java b/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java index 30412059394cd..b038b6effd08f 100644 --- a/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java @@ -48,7 +48,7 @@ public void testNoPendingRefreshIfAlreadyCancelled() { expectThrows(ExecutionException.class, TaskCancelledException.class, future::get); } - public void testListenersCompletedByRefresh() { + public void testListenersCompletedByRefresh() throws ExecutionException { final TestCache testCache = new TestCache(); // The first get() calls the refresh function @@ -81,7 +81,7 @@ public void testListenersCompletedByRefresh() { assertThat(future3.result(), equalTo(2)); } - public void testListenerCompletedByRefreshEvenIfDiscarded() { + public void testListenerCompletedByRefreshEvenIfDiscarded() throws ExecutionException { final TestCache testCache = new TestCache(); // This computation is discarded before it completes. @@ -103,7 +103,7 @@ public void testListenerCompletedByRefreshEvenIfDiscarded() { assertThat(future1.result(), sameInstance(future2.result())); } - public void testListenerCompletedWithCancellationExceptionIfRefreshCancelled() { + public void testListenerCompletedWithCancellationExceptionIfRefreshCancelled() throws ExecutionException { final TestCache testCache = new TestCache(); // This computation is discarded before it completes. @@ -120,12 +120,12 @@ public void testListenerCompletedWithCancellationExceptionIfRefreshCancelled() { testCache.get("bar", () -> false, future2); testCache.assertPendingRefreshes(2); testCache.assertNextRefreshCancelled(); - expectThrows(TaskCancelledException.class, future1::result); + expectThrows(ExecutionException.class, TaskCancelledException.class, future1::result); testCache.completeNextRefresh("bar", 2); assertThat(future2.result(), equalTo(2)); } - public void testListenerCompletedWithFresherInputIfSuperseded() { + public void testListenerCompletedWithFresherInputIfSuperseded() throws ExecutionException { final TestCache testCache = new TestCache(); // This computation is superseded before it completes. 
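The mechanical change running through these test hunks: `result()` now reports failures as a checked `java.util.concurrent.ExecutionException` wrapping the original cause, so assertions move from the one-class `expectThrows(CauseType.class, ...)` form to the two-class `expectThrows(ExecutionException.class, CauseType.class, ...)` overload, which checks the wrapper type and returns the unwrapped cause for further assertions. A minimal JDK-only sketch of the wrap/unwrap contract (the future and exception types here are illustrative, not taken from the patch):

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

public class WrappedFailureDemo {
    public static void main(String[] args) {
        CompletableFuture<String> future = new CompletableFuture<>();
        // Complete the future exceptionally, as a failed or cancelled task would.
        future.completeExceptionally(new IllegalStateException("simulated"));
        try {
            future.get(); // like the new result(): throws ExecutionException
        } catch (ExecutionException e) {
            // The original failure travels as the cause; the two-class expectThrows
            // overload asserts exactly this and hands the cause back to the test.
            assert e.getCause() instanceof IllegalStateException;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
```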
@@ -164,10 +164,10 @@ public void testRunsCancellationChecksEvenWhenSuperseded() { isCancelled.set(true); testCache.completeNextRefresh("bar", 1); - expectThrows(TaskCancelledException.class, future1::result); + expectThrows(ExecutionException.class, TaskCancelledException.class, future1::result); } - public void testExceptionCompletesListenersButIsNotCached() { + public void testExceptionCompletesListenersButIsNotCached() throws ExecutionException { final TestCache testCache = new TestCache(); // If a refresh results in an exception then all the pending get() calls complete exceptionally @@ -178,8 +178,8 @@ public void testExceptionCompletesListenersButIsNotCached() { testCache.assertPendingRefreshes(1); final ElasticsearchException exception = new ElasticsearchException("simulated"); testCache.completeNextRefresh(exception); - assertSame(exception, expectThrows(ElasticsearchException.class, future0::result)); - assertSame(exception, expectThrows(ElasticsearchException.class, future1::result)); + assertSame(exception, expectThrows(ExecutionException.class, ElasticsearchException.class, future0::result)); + assertSame(exception, expectThrows(ExecutionException.class, ElasticsearchException.class, future1::result)); testCache.assertNoPendingRefreshes(); // The exception is not cached, however, so a subsequent get() call with a matching key performs another refresh @@ -187,7 +187,7 @@ public void testExceptionCompletesListenersButIsNotCached() { testCache.get("foo", () -> false, future2); testCache.assertPendingRefreshes(1); testCache.completeNextRefresh("foo", 1); - assertThat(future2.actionResult(), equalTo(1)); + assertThat(future2.result(), equalTo(1)); } public void testConcurrentRefreshesAndCancellation() throws InterruptedException { @@ -416,7 +416,7 @@ protected String getKey(String s) { testCache.get("successful", () -> false, successfulFuture); cancelledThread.join(); - expectThrows(TaskCancelledException.class, cancelledFuture::result); + expectThrows(ExecutionException.class, TaskCancelledException.class, cancelledFuture::result); } private static final ThreadContext testThreadContext = new ThreadContext(Settings.EMPTY); diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java index 2d1ec3e53da5f..74136448d2147 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ListenableFutureTests.java @@ -189,10 +189,10 @@ public void testRejection() { safeAwait(barrier); // release blocked executor if (success) { - expectThrows(EsRejectedExecutionException.class, future2::result); + expectThrows(ExecutionException.class, EsRejectedExecutionException.class, future2::result); assertNull(future1.actionGet(10, TimeUnit.SECONDS)); } else { - var exception = expectThrows(EsRejectedExecutionException.class, future2::result); + var exception = expectThrows(ExecutionException.class, EsRejectedExecutionException.class, future2::result); assertEquals(1, exception.getSuppressed().length); assertThat(exception.getSuppressed()[0], instanceOf(ElasticsearchException.class)); assertEquals( diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index b40e33c4baba8..8c9cd8cd54500 100644 --- 
a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -212,6 +212,7 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; +import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -258,7 +259,7 @@ public void createServices() { } @After - public void verifyReposThenStopServices() { + public void verifyReposThenStopServices() throws ExecutionException { try { clearDisruptionsAndAwaitSync(); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java index f3fac694f9980..751d3bce2fb33 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java @@ -331,7 +331,7 @@ public static ClusterState startShardsAndReroute( public static ClusterState reroute(AllocationService allocationService, ClusterState clusterState) { final var listener = new PlainActionFuture(); final var result = allocationService.reroute(clusterState, "test reroute", listener); - listener.result(); // ensures it completed successfully + safeGet(listener::result); // ensures it completed successfully return result; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 92ced07174c23..add0de1993233 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -48,6 +48,7 @@ import org.elasticsearch.client.internal.Requests; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -2298,6 +2299,20 @@ public static T safeGet(Future future) { } } + /** + * Call a {@link CheckedSupplier}, converting all exceptions into an {@link AssertionError}. Useful for avoiding + * try/catch boilerplate or cumbersome propagation of checked exceptions around something that should never throw. + * + * @return The value returned by the {@code supplier}. 
+ */ + public static T safeGet(CheckedSupplier supplier) { + try { + return supplier.get(); + } catch (Exception e) { + return fail(e); + } + } + /** * Wait for the exceptional completion of the given {@link SubscribableListener}, with a timeout of {@link #SAFE_AWAIT_TIMEOUT}, * preserving the thread's interrupt status flag and converting a successful completion, interrupt or timeout into an {@link diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java index 00cc9554a64eb..c85dc46d5d8e9 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java @@ -12,12 +12,13 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.core.Nullable; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.ExecutionException; import java.util.function.LongConsumer; -import java.util.function.Supplier; /** * An {@link ActionFuture} that listeners can be attached to. Listeners are executed when the future is completed @@ -200,7 +201,23 @@ public void addListener(ActionListener listener, long value) { assert invariant(); } - private static void executeListener(final ActionListener listener, final Supplier result) { + /** + * Return the result of this future, if it has been completed successfully, or unwrap and throw the exception with which it was + * completed exceptionally. It is not valid to call this method if the future is incomplete. 
+ */ + private Long actionResult() throws Exception { + try { + return result(); + } catch (ExecutionException e) { + if (e.getCause() instanceof Exception exCause) { + throw exCause; + } else { + throw e; + } + } + } + + private static void executeListener(final ActionListener listener, final CheckedSupplier result) { try { listener.onResponse(result.get()); } catch (Exception e) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java index 0b29b46b19b36..f26cd59f7532c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/SecondaryAuthenticatorTests.java @@ -67,6 +67,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -192,7 +193,11 @@ public void testAuthenticateTransportRequestFailsIfHeaderHasUnrecognizedCredenti final PlainActionFuture future = new PlainActionFuture<>(); authenticator.authenticate(AuthenticateAction.NAME, request, future); - final ElasticsearchSecurityException ex = expectThrows(ElasticsearchSecurityException.class, future::actionResult); + final ElasticsearchSecurityException ex = expectThrows( + ExecutionException.class, + ElasticsearchSecurityException.class, + future::result + ); assertThat(ex, TestMatchers.throwableWithMessage(Matchers.containsString("secondary user"))); assertThat(ex.getCause(), TestMatchers.throwableWithMessage(Matchers.containsString("credentials"))); } @@ -203,7 +208,11 @@ public void testAuthenticateRestRequestFailsIfHeaderHasUnrecognizedCredentials() final PlainActionFuture future = new PlainActionFuture<>(); authenticator.authenticateAndAttachToContext(request, future); - final ElasticsearchSecurityException ex = expectThrows(ElasticsearchSecurityException.class, future::actionResult); + final ElasticsearchSecurityException ex = expectThrows( + ExecutionException.class, + ElasticsearchSecurityException.class, + future::result + ); assertThat(ex, TestMatchers.throwableWithMessage(Matchers.containsString("secondary user"))); assertThat(ex.getCause(), TestMatchers.throwableWithMessage(Matchers.containsString("credentials"))); @@ -287,7 +296,11 @@ private void assertAuthenticateWithIncorrectPassword(Consumer future = new PlainActionFuture<>(); authenticator.authenticate(AuthenticateAction.NAME, request, future); - final SecondaryAuthentication secondaryAuthentication = future.actionResult(); + final SecondaryAuthentication secondaryAuthentication = future.result(); assertThat(secondaryAuthentication, Matchers.notNullValue()); assertThat(secondaryAuthentication.getAuthentication(), Matchers.notNullValue()); assertThat(secondaryAuthentication.getAuthentication().getEffectiveSubject().getUser(), equalTo(user)); From e215a2a0764a9fed235e91212417e8b5e1cdc4ce Mon Sep 17 00:00:00 2001 From: Mikhail Berezovskiy Date: Wed, 3 Jul 2024 20:47:42 -0700 Subject: [PATCH 176/216] remove security manager suppression (#110447) Follow-up to #110358. We don't need security suppression after Netty's SelfSignedCertificate removal. 
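For context on why those suppressions existed: Netty's `SelfSignedCertificate` helper exposes its generated certificate and key as `java.io.File`, which is forbidden API in the Elasticsearch test framework, hence the `@SuppressForbidden` (and the security-manager opt-out) deleted below. A hedged sketch of the old-style usage, assuming the standard `io.netty.handler.ssl.util.SelfSignedCertificate` API rather than code from this repository:

```java
import io.netty.handler.ssl.util.SelfSignedCertificate;

import java.io.File;

public class SelfSignedCertificateDemo {
    public static void main(String[] args) throws Exception {
        // Generates a throwaway key pair and X.509 certificate for TLS tests.
        SelfSignedCertificate ssc = new SelfSignedCertificate();
        File certChain = ssc.certificate(); // the java.io.File accessors are what
        File privateKey = ssc.privateKey(); // tripped the forbidden-APIs check
        System.out.println("cert=" + certChain + " key=" + privateKey);
        ssc.delete(); // clean up the temporary files
    }
}
```

Since the test now builds its TLS context from checked-in PEM files (via `addSSLSettingsForNodePEMFiles`), neither annotation is needed.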
--- .../SecurityNetty4HttpServerTransportCloseNotifyTests.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java index 0ac6ddc8245a1..ec2881b989d0b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportCloseNotifyTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.http.AbstractHttpServerTransportTestCase; import org.elasticsearch.http.HttpServerTransport; @@ -40,7 +39,6 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.telemetry.tracing.Tracer; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty4.SharedGroupFactory; @@ -62,8 +60,6 @@ import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForNodePEMFiles; -@ESTestCase.WithoutSecurityManager -@SuppressForbidden(reason = "requires java.io.File for netty self-signed certificate") public class SecurityNetty4HttpServerTransportCloseNotifyTests extends AbstractHttpServerTransportTestCase { private static T safePoll(BlockingQueue queue) { From 6eaf1714110a90194b35732df6ebfcffc7b152c9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 4 Jul 2024 09:20:19 +0200 Subject: [PATCH 177/216] Add some information about the impact of index.codec setting. (#110413) --- docs/reference/index-modules.asciidoc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 40b4ff4bb9dc8..04bebfae2763b 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -80,7 +80,9 @@ breaking change]. compression ratio, at the expense of slower stored fields performance. If you are updating the compression type, the new one will be applied after segments are merged. Segment merging can be forced using - <>. + <>. Experiments with indexing log datasets + have shown that `best_compression` gives up to ~18% lower storage usage + compared to `default` while only minimally affecting indexing throughput (~2%). 
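To make the trade-off concrete, here is a brief sketch of opting an index into `best_compression` at creation time through the Java `Settings` API; the shard count is illustrative, and the same setting can be supplied in the JSON body of a create-index request:

```java
import org.elasticsearch.common.settings.Settings;

// index.codec is a static index setting: choose it when the index is created.
// Existing segments keep their old codec until they are rewritten by merging,
// which is why the paragraph above points at force-merge to apply a change.
Settings indexSettings = Settings.builder()
    .put("index.number_of_shards", 1) // illustrative
    .put("index.codec", "best_compression") // instead of the "default" codec
    .build();
```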
[[index-mode-setting]] `index.mode`:: + From 87d51181c928efba9b1a3a487c77534102a61082 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 4 Jul 2024 08:52:38 +0100 Subject: [PATCH 178/216] Add some specific unit tests for ReservedClusterState methods (#110436) --- .../ReservedClusterStateServiceTests.java | 74 +++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java index 5d675b99ba9ab..db8af818f1c52 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java @@ -34,6 +34,8 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.junit.Assert; +import org.mockito.ArgumentCaptor; +import org.mockito.ArgumentMatchers; import java.io.IOException; import java.util.ArrayList; @@ -54,17 +56,21 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class ReservedClusterStateServiceTests extends ESTestCase { @@ -140,6 +146,32 @@ public void testOperatorController() throws IOException { } } + public void testInitEmptyTask() { + ClusterService clusterService = mock(ClusterService.class); + + ArgumentCaptor updateTask = ArgumentCaptor.captor(); + + // grab the update task when it gets given to us + when(clusterService.createTaskQueue(ArgumentMatchers.contains("reserved state update"), any(), any())).thenAnswer(i -> { + @SuppressWarnings("unchecked") + MasterServiceTaskQueue queue = mock(MasterServiceTaskQueue.class); + doNothing().when(queue).submitTask(any(), updateTask.capture(), any()); + return queue; + }); + + ReservedClusterStateService service = new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of()); + service.initEmpty("namespace", ActionListener.noop()); + + assertThat(updateTask.getValue(), notNullValue()); + ClusterState state = ClusterState.builder(new ClusterName("test")).build(); + ClusterState updatedState = updateTask.getValue().execute(state); + + assertThat( + updatedState.metadata().reservedStateMetadata(), + equalTo(Map.of("namespace", new ReservedStateMetadata("namespace", ReservedStateMetadata.EMPTY_VERSION, Map.of(), null))) + ); + } + public void testUpdateStateTasks() throws Exception { RerouteService rerouteService = mock(RerouteService.class); @@ -196,6 +228,48 @@ public Releasable captureResponseHeaders() { verify(rerouteService, times(1)).reroute(anyString(), any(), any()); } + public void 
testUpdateErrorState() { + ClusterService clusterService = mock(ClusterService.class); + ClusterState state = ClusterState.builder(new ClusterName("test")).build(); + + ArgumentCaptor updateTask = ArgumentCaptor.captor(); + @SuppressWarnings("unchecked") + MasterServiceTaskQueue errorQueue = mock(MasterServiceTaskQueue.class); + doNothing().when(errorQueue).submitTask(any(), updateTask.capture(), any()); + + // grab the update task when it gets given to us + when(clusterService.createTaskQueue(ArgumentMatchers.contains("reserved state error"), any(), any())) + .thenReturn(errorQueue); + when(clusterService.state()).thenReturn(state); + + ReservedClusterStateService service = new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of()); + + ErrorState error = new ErrorState("namespace", 2L, List.of("error"), ReservedStateErrorMetadata.ErrorKind.TRANSIENT); + service.updateErrorState(error); + + assertThat(updateTask.getValue(), notNullValue()); + verify(errorQueue).submitTask(any(), any(), any()); + + ClusterState updatedState = updateTask.getValue().execute(state); + assertThat( + updatedState.metadata().reservedStateMetadata().get("namespace"), + equalTo( + new ReservedStateMetadata( + "namespace", + ReservedStateMetadata.NO_VERSION, + Map.of(), + new ReservedStateErrorMetadata(2L, ReservedStateErrorMetadata.ErrorKind.TRANSIENT, List.of("error")) + ) + ) + ); + + // it should not update if the error version is less than the current version + when(clusterService.state()).thenReturn(updatedState); + ErrorState oldError = new ErrorState("namespace", 1L, List.of("old error"), ReservedStateErrorMetadata.ErrorKind.TRANSIENT); + service.updateErrorState(oldError); + verifyNoMoreInteractions(errorQueue); + } + public void testErrorStateTask() throws Exception { ClusterState state = ClusterState.builder(new ClusterName("test")).build(); From 9867eed831076eae3eba1ad36db464c17d04571f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 4 Jul 2024 10:04:25 +0200 Subject: [PATCH 179/216] [Inference API] Make error message in AbstractBWCWireSerializationTestCase more explicit, that the number refers to the TransportVersion (#110433) --- .../core/ml/AbstractBWCWireSerializationTestCase.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java index e9a5b08f8051d..2098a7ff904a1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/AbstractBWCWireSerializationTestCase.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Strings; import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.io.IOException; @@ -66,8 +67,10 @@ protected final void assertBwcSerialization(T testInstance, TransportVersion ver * @param version The version which serialized */ protected void assertOnBWCObject(T bwcSerializedObject, T testInstance, TransportVersion version) { - assertNotSame(version.toString(), bwcSerializedObject, testInstance); - assertEquals(version.toString(), bwcSerializedObject, testInstance); - assertEquals(version.toString(), bwcSerializedObject.hashCode(), 
testInstance.hashCode()); + var errorMessage = Strings.format("Failed for TransportVersion [%s]", version.toString()); + + assertNotSame(errorMessage, bwcSerializedObject, testInstance); + assertEquals(errorMessage, bwcSerializedObject, testInstance); + assertEquals(errorMessage, bwcSerializedObject.hashCode(), testInstance.hashCode()); } } From 12272b14d8776b2efd1fcc1d7952718dd5872f1e Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 4 Jul 2024 10:04:40 +0200 Subject: [PATCH 180/216] [Inference API] Use projectId for Google Vertex AI embeddings rate limit grouping (#110365) --- .../http/sender/GoogleVertexAiEmbeddingsRequestManager.java | 4 ++-- .../GoogleVertexAiEmbeddingsRateLimitServiceSettings.java | 2 +- .../embeddings/GoogleVertexAiEmbeddingsServiceSettings.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java index 7a9fcff2dc276..c682da9a1694a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/GoogleVertexAiEmbeddingsRequestManager.java @@ -46,11 +46,11 @@ public GoogleVertexAiEmbeddingsRequestManager(GoogleVertexAiEmbeddingsModel mode this.truncator = Objects.requireNonNull(truncator); } - record RateLimitGrouping(int modelIdHash) { + record RateLimitGrouping(int projectIdHash) { public static RateLimitGrouping of(GoogleVertexAiEmbeddingsModel model) { Objects.requireNonNull(model); - return new RateLimitGrouping(model.rateLimitServiceSettings().modelId().hashCode()); + return new RateLimitGrouping(model.rateLimitServiceSettings().projectId().hashCode()); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java index 7e1e0056de2b5..a95860b1793d5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsRateLimitServiceSettings.java @@ -11,5 +11,5 @@ public interface GoogleVertexAiEmbeddingsRateLimitServiceSettings extends GoogleVertexAiRateLimitServiceSettings { - String modelId(); + String projectId(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java index ce7dc2726545f..f4bf40d290399 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsServiceSettings.java @@ -155,6 +155,7 @@ public GoogleVertexAiEmbeddingsServiceSettings(StreamInput in) throws IOExceptio this.rateLimitSettings = new RateLimitSettings(in); } + @Override public String projectId() { return projectId; } @@ -163,7 +164,6 @@ public String location() { return location; } - @Override public String modelId() { return modelId; } From c5eb558371d61a0149f57363b25fdc11aadfa90c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 4 Jul 2024 09:10:43 +0000 Subject: [PATCH 181/216] Bump to version 8.16.0 --- .backportrc.json | 4 +- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 17 +++++ .buildkite/pipelines/periodic.yml | 24 ++++++- .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 1 + build-tools-internal/version.properties | 2 +- docs/reference/migration/index.asciidoc | 2 + .../reference/migration/migrate_8_16.asciidoc | 20 ++++++ docs/reference/release-notes.asciidoc | 4 ++ docs/reference/release-notes/8.16.0.asciidoc | 8 +++ .../release-notes/highlights.asciidoc | 62 +++---------------- .../main/java/org/elasticsearch/Version.java | 3 +- 13 files changed, 89 insertions(+), 61 deletions(-) create mode 100644 docs/reference/migration/migrate_8_16.asciidoc create mode 100644 docs/reference/release-notes/8.16.0.asciidoc diff --git a/.backportrc.json b/.backportrc.json index 59843f4d5f134..77b06cd419275 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,9 +1,9 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { - "^v8.15.0$" : "main", + "^v8.16.0$" : "main", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } } \ No newline at end of file diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 4124d4e550d11..527a9fe1540f1 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -62,7 +62,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0"] + BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0", "8.16.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 4217fc91bf0fd..f7eb309ebfaca 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -611,6 +611,23 @@ steps: env: BWC_VERSION: 8.15.0 + - label: "{{matrix.image}} / 8.16.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + diskSizeGb: 250 + env: + BWC_VERSION: 8.16.0 + - group: packaging-tests-windows steps: - label: "{{matrix.image}} / packaging-tests-windows" diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 06e7ffbc8fb1c..253952826b8e7 100644 --- 
a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -682,6 +682,26 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 8.16.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + diskSizeGb: 250 + env: + BWC_VERSION: 8.16.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: concurrent-search-tests command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 @@ -751,7 +771,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0"] + BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0", "8.16.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -801,7 +821,7 @@ steps: - openjdk21 - openjdk22 - openjdk23 - BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0"] + BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0", "8.16.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index bce556e9fc352..833088dbd363a 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -33,3 +33,4 @@ BWC_VERSION: - "8.13.4" - "8.14.2" - "8.15.0" + - "8.16.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 5fc4b6c072899..893071c5b91f1 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -2,3 +2,4 @@ BWC_VERSION: - "7.17.23" - "8.14.2" - "8.15.0" + - "8.16.0" diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 0fa6142789381..728f44a365974 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,4 +1,4 @@ -elasticsearch = 8.15.0 +elasticsearch = 8.16.0 lucene = 9.11.1 bundled_jdk_vendor = openjdk diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index 51a2898b5d598..0690f60495c97 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -1,5 +1,6 @@ include::migration_intro.asciidoc[] +* <> * <> * <> * <> @@ -17,6 +18,7 @@ include::migration_intro.asciidoc[] * <> * <> +include::migrate_8_16.asciidoc[] include::migrate_8_15.asciidoc[] include::migrate_8_14.asciidoc[] include::migrate_8_13.asciidoc[] diff --git a/docs/reference/migration/migrate_8_16.asciidoc b/docs/reference/migration/migrate_8_16.asciidoc new file mode 100644 index 0000000000000..aea6322f292bf --- /dev/null +++ b/docs/reference/migration/migrate_8_16.asciidoc @@ -0,0 +1,20 @@ +[[migrating-8.16]] +== Migrating to 8.16 +++++ +8.16 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.16. + +See also <> and <>. + +coming::[8.16.0] + + +[discrete] +[[breaking-changes-8.16]] +=== Breaking changes + +There are no breaking changes in {es} 8.16. + diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 2e043834c9969..20889df0c58eb 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,7 +6,9 @@ This section summarizes the changes in each release. +* <> * <> +* <> * <> * <> * <> @@ -68,7 +70,9 @@ This section summarizes the changes in each release. 
-- +include::release-notes/8.16.0.asciidoc[] include::release-notes/8.15.0.asciidoc[] +include::release-notes/8.14.2.asciidoc[] include::release-notes/8.14.1.asciidoc[] include::release-notes/8.14.0.asciidoc[] include::release-notes/8.13.4.asciidoc[] diff --git a/docs/reference/release-notes/8.16.0.asciidoc b/docs/reference/release-notes/8.16.0.asciidoc new file mode 100644 index 0000000000000..7b2e7459be968 --- /dev/null +++ b/docs/reference/release-notes/8.16.0.asciidoc @@ -0,0 +1,8 @@ +[[release-notes-8.16.0]] +== {es} version 8.16.0 + +coming[8.16.0] + +Also see <>. + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index ead1596c64fdd..e70892ef25928 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -11,7 +11,8 @@ For detailed information about this release, see the <> and // Add previous release to the list Other versions: -{ref-bare}/8.14/release-highlights.html[8.14] +{ref-bare}/8.15/release-highlights.html[8.15] +| {ref-bare}/8.14/release-highlights.html[8.14] | {ref-bare}/8.13/release-highlights.html[8.13] | {ref-bare}/8.12/release-highlights.html[8.12] | {ref-bare}/8.11/release-highlights.html[8.11] @@ -29,60 +30,13 @@ Other versions: endif::[] +// The notable-highlights tag marks entries that +// should be featured in the Stack Installation and Upgrade Guide: // tag::notable-highlights[] - -[discrete] -[[stored_fields_are_compressed_with_zstandard_instead_of_lz4_deflate]] -=== Stored fields are now compressed with ZStandard instead of LZ4/DEFLATE -Stored fields are now compressed by splitting documents into blocks, which -are then compressed independently with ZStandard. `index.codec: default` -(default) uses blocks of at most 14kB or 128 documents compressed with level -0, while `index.codec: best_compression` uses blocks of at most 240kB or -2048 documents compressed at level 3. On most datasets that we tested -against, this yielded storage improvements in the order of 10%, slightly -faster indexing and similar retrieval latencies. - -{es-pull}103374[#103374] - +// [discrete] +// === Heading +// +// Description. // end::notable-highlights[] -[discrete] -[[new_custom_parser_for_iso_8601_datetimes]] -=== New custom parser for ISO-8601 datetimes -This introduces a new custom parser for ISO-8601 datetimes, for the `iso8601`, `strict_date_optional_time`, and -`strict_date_optional_time_nanos` built-in date formats. This provides a performance improvement over the -default Java date-time parsing. Whilst it maintains much of the same behaviour, -the new parser does not accept nonsensical date-time strings that have multiple fractional seconds fields -or multiple timezone specifiers. If the new parser fails to parse a string, it will then use the previous parser -to parse it. If a large proportion of the input data consists of these invalid strings, this may cause -a small performance degradation. If you wish to force the use of the old parsers regardless, -set the JVM property `es.datetime.java_time_parsers=true` on all ES nodes. 
- -{es-pull}106486[#106486] - -[discrete] -[[preview_support_for_connection_type_domain_isp_databases_in_geoip_processor]] -=== Preview: Support for the 'Connection Type, 'Domain', and 'ISP' databases in the geoip processor -As a Technical Preview, the {ref}/geoip-processor.html[`geoip`] processor can now use the commercial -https://dev.maxmind.com/geoip/docs/databases/connection-type[GeoIP2 'Connection Type'], -https://dev.maxmind.com/geoip/docs/databases/domain[GeoIP2 'Domain'], -and -https://dev.maxmind.com/geoip/docs/databases/isp[GeoIP2 'ISP'] -databases from MaxMind. - -{es-pull}108683[#108683] - -[discrete] -[[update_elasticsearch_to_lucene_9_11]] -=== Update Elasticsearch to Lucene 9.11 -Elasticsearch is now updated using the latest Lucene version 9.11. -Here are the full release notes: -But, here are some particular highlights: -- Usage of MADVISE for better memory management: https://github.com/apache/lucene/pull/13196 -- Use RWLock to access LRUQueryCache to reduce contention: https://github.com/apache/lucene/pull/13306 -- Speedup multi-segment HNSW graph search for nested kNN queries: https://github.com/apache/lucene/pull/13121 -- Add a MemorySegment Vector scorer - for scoring without copying on-heap vectors: https://github.com/apache/lucene/pull/13339 - -{es-pull}109219[#109219] - diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index b2c78453d9c75..bc1612f704c59 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -179,7 +179,8 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_14_1 = new Version(8_14_01_99); public static final Version V_8_14_2 = new Version(8_14_02_99); public static final Version V_8_15_0 = new Version(8_15_00_99); - public static final Version CURRENT = V_8_15_0; + public static final Version V_8_16_0 = new Version(8_16_00_99); + public static final Version CURRENT = V_8_16_0; private static final NavigableMap VERSION_IDS; private static final Map VERSION_STRINGS; From 4ecd5f1314d22afe80ab1eff5302f1e7a6399ca5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 4 Jul 2024 09:46:58 +0000 Subject: [PATCH 182/216] Bump versions after 8.14.2 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 527a9fe1540f1..c4ee846ba564f 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -62,7 +62,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0", "8.16.0"] + BWC_VERSION: ["7.17.23", "8.14.3", "8.15.0", "8.16.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index f7eb309ebfaca..982c1f69856c0 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -577,8 +577,8 @@ steps: env: BWC_VERSION: 8.13.4 - - label: "{{matrix.image}} / 8.14.2 / packaging-tests-upgrade" - command: 
./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.2 + - label: "{{matrix.image}} / 8.14.3 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.3 timeout_in_minutes: 300 matrix: setup: @@ -592,7 +592,7 @@ steps: buildDirectory: /dev/shm/bk diskSizeGb: 250 env: - BWC_VERSION: 8.14.2 + BWC_VERSION: 8.14.3 - label: "{{matrix.image}} / 8.15.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 253952826b8e7..5bc33433bbc72 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -642,8 +642,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.14.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.2#bwcTest + - label: 8.14.3 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.3#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -653,7 +653,7 @@ steps: preemptible: true diskSizeGb: 250 env: - BWC_VERSION: 8.14.2 + BWC_VERSION: 8.14.3 retry: automatic: - exit_status: "-1" @@ -771,7 +771,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0", "8.16.0"] + BWC_VERSION: ["7.17.23", "8.14.3", "8.15.0", "8.16.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -821,7 +821,7 @@ steps: - openjdk21 - openjdk22 - openjdk23 - BWC_VERSION: ["7.17.23", "8.14.2", "8.15.0", "8.16.0"] + BWC_VERSION: ["7.17.23", "8.14.3", "8.15.0", "8.16.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 833088dbd363a..9de7dbfa2a5c2 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -31,6 +31,6 @@ BWC_VERSION: - "8.11.4" - "8.12.2" - "8.13.4" - - "8.14.2" + - "8.14.3" - "8.15.0" - "8.16.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 893071c5b91f1..90a3dcba977c8 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - "7.17.23" - - "8.14.2" + - "8.14.3" - "8.15.0" - "8.16.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index bc1612f704c59..00ffcdd0f4d9e 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -178,6 +178,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version V_8_14_1 = new Version(8_14_01_99); public static final Version V_8_14_2 = new Version(8_14_02_99); + public static final Version V_8_14_3 = new Version(8_14_03_99); public static final Version V_8_15_0 = new Version(8_15_00_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version CURRENT = V_8_16_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index ba1dab5589ee2..5f1972e30198a 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -123,3 +123,4 @@ 8.13.4,8595001 8.14.0,8636001 8.14.1,8636001 +8.14.2,8636001 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv 
b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index b7ca55a2b2b0d..d1116ddf99ee7 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -123,3 +123,4 @@ 8.13.4,8503000 8.14.0,8505000 8.14.1,8505000 +8.14.2,8505000 From caebbac3f9bc3fc92f27003d809886bad6011d5d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 4 Jul 2024 09:48:23 +0000 Subject: [PATCH 183/216] Prune changelogs after 8.14.2 release --- docs/changelog/106253.yaml | 6 ------ docs/changelog/109341.yaml | 5 ----- docs/changelog/109492.yaml | 5 ----- docs/changelog/109500.yaml | 5 ----- docs/changelog/109533.yaml | 5 ----- docs/changelog/109629.yaml | 5 ----- docs/changelog/109632.yaml | 5 ----- docs/changelog/109636.yaml | 5 ----- docs/changelog/109695.yaml | 5 ----- docs/changelog/109824.yaml | 6 ------ docs/changelog/110035.yaml | 5 ----- docs/changelog/110103.yaml | 5 ----- 12 files changed, 62 deletions(-) delete mode 100644 docs/changelog/106253.yaml delete mode 100644 docs/changelog/109341.yaml delete mode 100644 docs/changelog/109492.yaml delete mode 100644 docs/changelog/109500.yaml delete mode 100644 docs/changelog/109533.yaml delete mode 100644 docs/changelog/109629.yaml delete mode 100644 docs/changelog/109632.yaml delete mode 100644 docs/changelog/109636.yaml delete mode 100644 docs/changelog/109695.yaml delete mode 100644 docs/changelog/109824.yaml delete mode 100644 docs/changelog/110035.yaml delete mode 100644 docs/changelog/110103.yaml diff --git a/docs/changelog/106253.yaml b/docs/changelog/106253.yaml deleted file mode 100644 index b80cda37f63c7..0000000000000 --- a/docs/changelog/106253.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106253 -summary: Fix for from parameter when using `sub_searches` and rank -area: Ranking -type: bug -issues: - - 99011 diff --git a/docs/changelog/109341.yaml b/docs/changelog/109341.yaml deleted file mode 100644 index 0c1eaa98a8aa2..0000000000000 --- a/docs/changelog/109341.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109341 -summary: Re-define `index.mapper.dynamic` setting in 8.x for a better 7.x to 8.x upgrade if this setting is used. 
-area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/109492.yaml b/docs/changelog/109492.yaml deleted file mode 100644 index d4d1e83eb7786..0000000000000 --- a/docs/changelog/109492.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109492 -summary: Add hexstring support byte painless scorers -area: Search -type: bug -issues: [] diff --git a/docs/changelog/109500.yaml b/docs/changelog/109500.yaml deleted file mode 100644 index cfd6bc770d5d6..0000000000000 --- a/docs/changelog/109500.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109500 -summary: Guard file settings readiness on file settings support -area: Infra/Settings -type: bug -issues: [] diff --git a/docs/changelog/109533.yaml b/docs/changelog/109533.yaml deleted file mode 100644 index 5720410e5f370..0000000000000 --- a/docs/changelog/109533.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109533 -summary: Fix IndexOutOfBoundsException during inference -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/109629.yaml b/docs/changelog/109629.yaml deleted file mode 100644 index c468388117b72..0000000000000 --- a/docs/changelog/109629.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109629 -summary: "[Data streams] Fix the description of the lazy rollover task" -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/109632.yaml b/docs/changelog/109632.yaml deleted file mode 100644 index 6b04160bbdbec..0000000000000 --- a/docs/changelog/109632.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109632 -summary: Force execute inactive sink reaper -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/109636.yaml b/docs/changelog/109636.yaml deleted file mode 100644 index f8f73a75dfd3d..0000000000000 --- a/docs/changelog/109636.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109636 -summary: "Ensure a lazy rollover request will rollover the target data stream once." -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/109695.yaml b/docs/changelog/109695.yaml deleted file mode 100644 index f922b76412676..0000000000000 --- a/docs/changelog/109695.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 109695 -summary: Fix ESQL cancellation for exchange requests -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/109824.yaml b/docs/changelog/109824.yaml deleted file mode 100644 index 987e8c0a8b1a2..0000000000000 --- a/docs/changelog/109824.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 109824 -summary: Check array size before returning array item in script doc values -area: Infra/Scripting -type: bug -issues: - - 104998 diff --git a/docs/changelog/110035.yaml b/docs/changelog/110035.yaml deleted file mode 100644 index 670c58240d835..0000000000000 --- a/docs/changelog/110035.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 110035 -summary: Fix equals and hashcode for `SingleValueQuery.LuceneQuery` -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/110103.yaml b/docs/changelog/110103.yaml deleted file mode 100644 index 9f613ec2b446e..0000000000000 --- a/docs/changelog/110103.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 110103 -summary: Fix automatic tracking of collapse with `docvalue_fields` -area: Search -type: bug -issues: [] From ffea002a99554e72bfe0954d838857d523935942 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 4 Jul 2024 13:10:00 +0200 Subject: [PATCH 184/216] [DOCS] Adds 8.14.2 release notes to main. 
(#110471) --- docs/reference/release-notes/8.14.2.asciidoc | 38 ++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 docs/reference/release-notes/8.14.2.asciidoc diff --git a/docs/reference/release-notes/8.14.2.asciidoc b/docs/reference/release-notes/8.14.2.asciidoc new file mode 100644 index 0000000000000..2bb374451b2ac --- /dev/null +++ b/docs/reference/release-notes/8.14.2.asciidoc @@ -0,0 +1,38 @@ +[[release-notes-8.14.2]] +== {es} version 8.14.2 + +coming[8.14.2] + +Also see <>. + +[[bug-8.14.2]] +[float] +=== Bug fixes + +Data streams:: +* Ensure a lazy rollover request will rollover the target data stream once. {es-pull}109636[#109636] +* [Data streams] Fix the description of the lazy rollover task {es-pull}109629[#109629] + +ES|QL:: +* Fix ESQL cancellation for exchange requests {es-pull}109695[#109695] +* Fix equals and hashcode for `SingleValueQuery.LuceneQuery` {es-pull}110035[#110035] +* Force execute inactive sink reaper {es-pull}109632[#109632] + +Infra/Scripting:: +* Check array size before returning array item in script doc values {es-pull}109824[#109824] (issue: {es-issue}104998[#104998]) + +Infra/Settings:: +* Guard file settings readiness on file settings support {es-pull}109500[#109500] + +Machine Learning:: +* Fix IndexOutOfBoundsException during inference {es-pull}109533[#109533] + +Mapping:: +* Re-define `index.mapper.dynamic` setting in 8.x for a better 7.x to 8.x upgrade if this setting is used. {es-pull}109341[#109341] + +Ranking:: +* Fix for from parameter when using `sub_searches` and rank {es-pull}106253[#106253] (issue: {es-issue}99011[#99011]) + +Search:: +* Add hexstring support byte painless scorers {es-pull}109492[#109492] +* Fix automatic tracking of collapse with `docvalue_fields` {es-pull}110103[#110103] \ No newline at end of file From c62994710cb8c12de62152a02cd89557a43b217a Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 4 Jul 2024 15:16:05 +0200 Subject: [PATCH 185/216] Update replicas for downsample index only when necessary (#110467) The number of replicas for the downsample index gets set to 0 by default (overridable via setting) and later incremented to a higher value. This is done unconditionally, but in reality if the downsample index already has replicas, we should not override its number of replicas. Closes #109968 --- .../downsample/TransportDownsampleAction.java | 44 ++++++++++++------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 66511f2cc15f0..abf629dc9c1fa 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -341,10 +341,22 @@ protected void masterOperation( delegate.onFailure(e); return; } + + /* + * When creating the downsample index, we copy the index.number_of_shards from source index, + * and we set the index.number_of_replicas to 0, to avoid replicating the index being built. + * Also, we set the index.refresh_interval to -1. + * We will set the correct number of replicas and refresh the index later. + * + * We should note that there is a risk of losing a node during the downsample process. In this + * case downsample will fail. 
+ */ + int minNumReplicas = clusterService.getSettings().getAsInt(Downsample.DOWNSAMPLE_MIN_NUMBER_OF_REPLICAS_NAME, 0); + // 3. Create downsample index createDownsampleIndex( - clusterService.getSettings(), downsampleIndexName, + minNumReplicas, sourceIndexMetadata, mapping, request, @@ -353,6 +365,7 @@ protected void masterOperation( performShardDownsampling( request, delegate, + minNumReplicas, sourceIndexMetadata, downsampleIndexName, parentTask, @@ -382,6 +395,7 @@ protected void masterOperation( performShardDownsampling( request, delegate, + minNumReplicas, sourceIndexMetadata, downsampleIndexName, parentTask, @@ -451,6 +465,7 @@ private boolean canShortCircuit( private void performShardDownsampling( DownsampleAction.Request request, ActionListener listener, + int minNumReplicas, IndexMetadata sourceIndexMetadata, String downsampleIndexName, TaskId parentTask, @@ -509,7 +524,15 @@ public void onResponse(PersistentTasksCustomMetadata.PersistentTask listener, + int minNumReplicas, final IndexMetadata sourceIndexMetadata, final String downsampleIndexName, final TaskId parentTask, @@ -564,7 +588,7 @@ private void updateTargetIndexSettingStep( // 4. Make downsample index read-only and set the correct number of replicas final Settings.Builder settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true); // Number of replicas had been previously set to 0 to speed up index population - if (sourceIndexMetadata.getNumberOfReplicas() > 0) { + if (sourceIndexMetadata.getNumberOfReplicas() > 0 && minNumReplicas == 0) { settings.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, sourceIndexMetadata.getNumberOfReplicas()); } // Setting index.hidden has been initially set to true. We revert this to the value of the @@ -842,28 +866,18 @@ private static void addDynamicTemplates(final XContentBuilder builder) throws IO } private void createDownsampleIndex( - Settings settings, String downsampleIndexName, + int minNumReplicas, IndexMetadata sourceIndexMetadata, String mapping, DownsampleAction.Request request, ActionListener listener ) { - /* - * When creating the downsample index, we copy the index.number_of_shards from source index, - * and we set the index.number_of_replicas to 0, to avoid replicating the index being built. - * Also, we set the index.refresh_interval to -1. - * We will set the correct number of replicas and refresh the index later. - * - * We should note that there is a risk of losing a node during the downsample process. In this - * case downsample will fail. 
- */ - int numberOfReplicas = settings.getAsInt(Downsample.DOWNSAMPLE_MIN_NUMBER_OF_REPLICAS_NAME, 0); var downsampleInterval = request.getDownsampleConfig().getInterval().toString(); Settings.Builder builder = Settings.builder() .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, sourceIndexMetadata.getNumberOfShards()) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, String.valueOf(numberOfReplicas)) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, minNumReplicas) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "-1") .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), DownsampleTaskStatus.STARTED) .put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsampleInterval) From f3c811c73990b50bb230bd38a4ec05e7cd36419f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 4 Jul 2024 15:53:26 +0200 Subject: [PATCH 186/216] [Inference API] Use extractOptionalPositiveInteger instead of removeAsType in AzureAiStudioEmbeddingsServiceSettings (#110366) --- .../org/elasticsearch/test/ESTestCase.java | 7 ++ .../test/test/ESTestCaseTests.java | 5 ++ ...zureAiStudioEmbeddingsServiceSettings.java | 13 ++- ...iStudioEmbeddingsServiceSettingsTests.java | 86 +++++++++++++++++++ 4 files changed, 107 insertions(+), 4 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index add0de1993233..b8860690fffc4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -999,6 +999,13 @@ public static int randomNonNegativeInt() { return randomInt() & Integer.MAX_VALUE; } + /** + * @return an int between Integer.MIN_VALUE and -1 (inclusive) chosen uniformly at random. 
+ */ + public static int randomNegativeInt() { + return randomInt() | Integer.MIN_VALUE; + } + public static float randomFloat() { return random().nextFloat(); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java index 125c0563577fc..714c9bcde0469 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java @@ -45,6 +45,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -185,6 +186,10 @@ public void testRandomNonNegativeInt() { assertThat(randomNonNegativeInt(), greaterThanOrEqualTo(0)); } + public void testRandomNegativeInt() { + assertThat(randomNegativeInt(), lessThan(0)); + } + public void testRandomValueOtherThan() { // "normal" way of calling where the value is not null int bad = randomInt(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java index 1a39cd67a70f3..d4a1fd938625e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettings.java @@ -33,8 +33,8 @@ import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; public class AzureAiStudioEmbeddingsServiceSettings extends AzureAiStudioServiceSettings { @@ -59,10 +59,15 @@ private static AzureAiStudioEmbeddingCommonFields embeddingSettingsFromMap( ConfigurationParseContext context ) { var baseSettings = AzureAiStudioServiceSettings.fromMap(map, validationException, context); - SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); - Integer dims = removeAsType(map, DIMENSIONS, Integer.class); - Integer maxTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); + SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer dims = extractOptionalPositiveInteger(map, DIMENSIONS, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer maxTokens = extractOptionalPositiveInteger( + map, + MAX_INPUT_TOKENS, + ModelConfigurations.SERVICE_SETTINGS, + validationException + ); Boolean dimensionsSetByUser = extractOptionalBoolean(map, DIMENSIONS_SET_BY_USER, validationException); switch (context) { diff 
--git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java index 05388192b2f14..c857a22e52996 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/embeddings/AzureAiStudioEmbeddingsServiceSettingsTests.java @@ -170,6 +170,92 @@ public void testFromMap_Persistent_CreatesSettingsCorrectly() { ); } + public void testFromMap_ThrowsException_WhenDimensionsAreZero() { + var target = "http://sometarget.local"; + var provider = "openai"; + var endpointType = "token"; + var dimensions = 0; + + var settingsMap = createRequestSettingsMap(target, provider, endpointType, dimensions, true, null, SimilarityMeasure.COSINE); + + var thrownException = expectThrows( + ValidationException.class, + () -> AzureAiStudioEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST) + ); + + assertThat( + thrownException.getMessage(), + containsString("Validation Failed: 1: [service_settings] Invalid value [0]. [dimensions] must be a positive integer;") + ); + } + + public void testFromMap_ThrowsException_WhenDimensionsAreNegative() { + var target = "http://sometarget.local"; + var provider = "openai"; + var endpointType = "token"; + var dimensions = randomNegativeInt(); + + var settingsMap = createRequestSettingsMap(target, provider, endpointType, dimensions, true, null, SimilarityMeasure.COSINE); + + var thrownException = expectThrows( + ValidationException.class, + () -> AzureAiStudioEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST) + ); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value [%d]. [dimensions] must be a positive integer;", + dimensions + ) + ) + ); + } + + public void testFromMap_ThrowsException_WhenMaxInputTokensAreZero() { + var target = "http://sometarget.local"; + var provider = "openai"; + var endpointType = "token"; + var maxInputTokens = 0; + + var settingsMap = createRequestSettingsMap(target, provider, endpointType, null, true, maxInputTokens, SimilarityMeasure.COSINE); + + var thrownException = expectThrows( + ValidationException.class, + () -> AzureAiStudioEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST) + ); + + assertThat( + thrownException.getMessage(), + containsString("Validation Failed: 1: [service_settings] Invalid value [0]. 
[max_input_tokens] must be a positive integer;") + ); + } + + public void testFromMap_ThrowsException_WhenMaxInputTokensAreNegative() { + var target = "http://sometarget.local"; + var provider = "openai"; + var endpointType = "token"; + var maxInputTokens = randomNegativeInt(); + + var settingsMap = createRequestSettingsMap(target, provider, endpointType, null, true, maxInputTokens, SimilarityMeasure.COSINE); + + var thrownException = expectThrows( + ValidationException.class, + () -> AzureAiStudioEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST) + ); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value [%d]. [max_input_tokens] must be a positive integer;", + maxInputTokens + ) + ) + ); + } + public void testFromMap_PersistentContext_DoesNotThrowException_WhenDimensionsIsNull() { var target = "http://sometarget.local"; var provider = "openai"; From 8b7d83318177cb72150bbf41114320998ec7b244 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Thu, 4 Jul 2024 16:37:51 +0200 Subject: [PATCH 187/216] ESQL: Add tests to call aggregation intermediate states (#110279) Test aggregations intermediate states on base aggregation test class. Added another "middleware" to add "no rows" test cases. --- .../function/AbstractAggregationTestCase.java | 145 ++++++++++++++++-- .../function/AbstractFunctionTestCase.java | 39 +++-- .../expression/function/TestCaseSupplier.java | 4 +- 3 files changed, 160 insertions(+), 28 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index 05a6cec51284f..e20b9a987f5ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -23,7 +23,10 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.planner.ToAggregator; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -31,8 +34,11 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.oneOf; /** * Base class for aggregation tests. @@ -47,7 +53,43 @@ public abstract class AbstractAggregationTestCase extends AbstractFunctionTestCa */ protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecks(List suppliers) { // TODO: Add case with no input expecting null - return parameterSuppliersFromTypedData(randomizeBytesRefsOffset(suppliers)); + return parameterSuppliersFromTypedData(withNoRowsExpectingNull(randomizeBytesRefsOffset(suppliers))); + } + + /** + * Adds a test case with no rows, expecting null, to the list of suppliers. 
+ */ + protected static List withNoRowsExpectingNull(List suppliers) { + List newSuppliers = new ArrayList<>(suppliers); + Set> uniqueSignatures = new HashSet<>(); + + for (TestCaseSupplier original : suppliers) { + if (uniqueSignatures.add(original.types())) { + newSuppliers.add(new TestCaseSupplier(original.name() + " with no rows", original.types(), () -> { + var testCase = original.get(); + + if (testCase.getData().stream().noneMatch(TestCaseSupplier.TypedData::isMultiRow)) { + // Fail if no multi-row data, at least until a real case is found + fail("No multi-row data found in test case: " + testCase); + } + + var newData = testCase.getData().stream().map(td -> td.isMultiRow() ? td.withData(List.of()) : td).toList(); + + return new TestCaseSupplier.TestCase( + newData, + testCase.evaluatorToString(), + testCase.expectedType(), + nullValue(), + null, + testCase.getExpectedTypeError(), + null, + null + ); + })); + } + } + + return newSuppliers; } public void testAggregate() { @@ -56,6 +98,12 @@ public void testAggregate() { resolveExpression(expression, this::aggregateSingleMode, this::evaluate); } + public void testAggregateIntermediate() { + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + + resolveExpression(expression, this::aggregateWithIntermediates, this::evaluate); + } + public void testFold() { Expression expression = buildLiteralExpression(testCase); @@ -80,17 +128,78 @@ public void testFold() { }); } - private void aggregateSingleMode(AggregatorFunctionSupplier aggregatorFunctionSupplier) { + private void aggregateSingleMode(Expression expression) { + Object result; + try (var aggregator = aggregator(expression, initialInputChannels(), AggregatorMode.SINGLE)) { + Page inputPage = rows(testCase.getMultiRowFields()); + try { + aggregator.processPage(inputPage); + } finally { + inputPage.releaseBlocks(); + } + + result = extractResultFromAggregator(aggregator, PlannerUtils.toElementType(testCase.expectedType())); + } + + assertThat(result, not(equalTo(Double.NaN))); + assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.POSITIVE_INFINITY))); + assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false; + assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY))); + assertThat(result, testCase.getMatcher()); + if (testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } + } + + private void aggregateWithIntermediates(Expression expression) { + int intermediateBlockOffset = randomIntBetween(0, 10); + Block[] intermediateBlocks; + int intermediateStates; + + // Input rows to intermediate states + try (var aggregator = aggregator(expression, initialInputChannels(), AggregatorMode.INITIAL)) { + intermediateStates = aggregator.evaluateBlockCount(); + + int intermediateBlockExtraSize = randomIntBetween(0, 10); + intermediateBlocks = new Block[intermediateBlockOffset + intermediateStates + intermediateBlockExtraSize]; + + Page inputPage = rows(testCase.getMultiRowFields()); + try { + aggregator.processPage(inputPage); + } finally { + inputPage.releaseBlocks(); + } + + aggregator.evaluate(intermediateBlocks, intermediateBlockOffset, driverContext()); + + int positionCount = intermediateBlocks[intermediateBlockOffset].getPositionCount(); + + // Fill offset and extra blocks with nulls + for (int i = 0; i < intermediateBlockOffset; i++) { + intermediateBlocks[i] = 
driverContext().blockFactory().newConstantNullBlock(positionCount); + } + for (int i = intermediateBlockOffset + intermediateStates; i < intermediateBlocks.length; i++) { + intermediateBlocks[i] = driverContext().blockFactory().newConstantNullBlock(positionCount); + } + } + Object result; - try (var aggregator = new Aggregator(aggregatorFunctionSupplier.aggregator(driverContext()), AggregatorMode.SINGLE)) { - Page inputPage = rows(testCase.getMultiRowDataValues()); + // Intermediate states to final result + try ( + var aggregator = aggregator( + expression, + intermediaryInputChannels(intermediateStates, intermediateBlockOffset), + AggregatorMode.FINAL + ) + ) { + Page inputPage = new Page(intermediateBlocks); try { aggregator.processPage(inputPage); } finally { inputPage.releaseBlocks(); } - // ElementType from DataType result = extractResultFromAggregator(aggregator, PlannerUtils.toElementType(testCase.expectedType())); } @@ -124,11 +233,7 @@ private void evaluate(Expression evaluableExpression) { } } - private void resolveExpression( - Expression expression, - Consumer onAggregator, - Consumer onEvaluableExpression - ) { + private void resolveExpression(Expression expression, Consumer onAggregator, Consumer onEvaluableExpression) { logger.info( "Test Values: " + testCase.getData().stream().map(TestCaseSupplier.TypedData::toString).collect(Collectors.joining(",")) ); @@ -154,8 +259,7 @@ private void resolveExpression( assertThat(expression, instanceOf(ToAggregator.class)); logger.info("Result type: " + expression.dataType()); - var inputChannels = inputChannels(); - onAggregator.accept(((ToAggregator) expression).supplier(inputChannels)); + onAggregator.accept(expression); } private Object extractResultFromAggregator(Aggregator aggregator, ElementType expectedElementType) { @@ -167,7 +271,8 @@ private Object extractResultFromAggregator(Aggregator aggregator, ElementType ex var block = blocks[resultBlockIndex]; - assertThat(block.elementType(), equalTo(expectedElementType)); + // For null blocks, the element type is NULL, so if the provided matcher matches, the type works too + assertThat(block.elementType(), is(oneOf(expectedElementType, ElementType.NULL))); return toJavaObject(blocks[resultBlockIndex], 0); } finally { @@ -175,10 +280,14 @@ private Object extractResultFromAggregator(Aggregator aggregator, ElementType ex } } - private List inputChannels() { + private List initialInputChannels() { // TODO: Randomize channels // TODO: If surrogated, channels may change - return IntStream.range(0, testCase.getMultiRowDataValues().size()).boxed().toList(); + return IntStream.range(0, testCase.getMultiRowFields().size()).boxed().toList(); + } + + private List intermediaryInputChannels(int intermediaryStates, int offset) { + return IntStream.range(offset, offset + intermediaryStates).boxed().toList(); } /** @@ -210,4 +319,10 @@ private Expression resolveSurrogates(Expression expression) { return expression; } + + private Aggregator aggregator(Expression expression, List inputChannels, AggregatorMode mode) { + AggregatorFunctionSupplier aggregatorFunctionSupplier = ((ToAggregator) expression).supplier(inputChannels); + + return new Aggregator(aggregatorFunctionSupplier.aggregator(driverContext()), mode); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index dc650e3fcd965..f8a5d997f4c54 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -49,6 +49,7 @@ import org.elasticsearch.xpack.esql.optimizer.FoldNull; import org.elasticsearch.xpack.esql.parser.ExpressionBuilder; import org.elasticsearch.xpack.esql.planner.Layout; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.versionfield.Version; import org.junit.After; import org.junit.AfterClass; @@ -214,24 +215,40 @@ protected final Page row(List values) { } /** - * Creates a page based on a list of lists, where each list represents a column. + * Creates a page based on a list of multi-row fields. */ - protected final Page rows(List> values) { - if (values.isEmpty()) { + protected final Page rows(List multirowFields) { + if (multirowFields.isEmpty()) { return new Page(0, BlockUtils.NO_BLOCKS); } - var rowsCount = values.get(0).size(); + var rowsCount = multirowFields.get(0).multiRowData().size(); - values.stream().skip(1).forEach(l -> assertThat("All multi-row fields must have the same number of rows", l, hasSize(rowsCount))); + multirowFields.stream() + .skip(1) + .forEach( + field -> assertThat("All multi-row fields must have the same number of rows", field.multiRowData(), hasSize(rowsCount)) + ); - var rows = new ArrayList>(); - for (int i = 0; i < rowsCount; i++) { - final int index = i; - rows.add(values.stream().map(l -> l.get(index)).toList()); - } + var blocks = new Block[multirowFields.size()]; - var blocks = BlockUtils.fromList(TestBlockFactory.getNonBreakingInstance(), rows); + for (int i = 0; i < multirowFields.size(); i++) { + var field = multirowFields.get(i); + try ( + var wrapper = BlockUtils.wrapperFor( + TestBlockFactory.getNonBreakingInstance(), + PlannerUtils.toElementType(field.type()), + rowsCount + ) + ) { + + for (var row : field.multiRowData()) { + wrapper.accept(row); + } + + blocks[i] = wrapper.builder().build(); + } + } return new Page(rowsCount, blocks); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 9095f5da63bf3..77c45bbd69854 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -1301,8 +1301,8 @@ public List getDataValues() { return data.stream().filter(d -> d.forceLiteral == false).map(TypedData::data).collect(Collectors.toList()); } - public List> getMultiRowDataValues() { - return data.stream().filter(TypedData::isMultiRow).map(TypedData::multiRowData).collect(Collectors.toList()); + public List getMultiRowFields() { + return data.stream().filter(TypedData::isMultiRow).collect(Collectors.toList()); } public boolean canGetDataAsLiterals() { From 276ae121c22e9529186976b1aeba7c68ec1582b7 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Thu, 4 Jul 2024 09:48:04 -0700 Subject: [PATCH 188/216] Reflect latest changes in synthetic source documentation (#109501) --- docs/changelog/109501.yaml | 14 ++++ docs/reference/data-streams/tsds.asciidoc | 3 +- .../mapping/fields/source-field.asciidoc | 12 +-- .../mapping/fields/synthetic-source.asciidoc | 83 +++++++++++-------- 4 files changed, 70 insertions(+), 42 deletions(-) 
create mode 100644 docs/changelog/109501.yaml diff --git a/docs/changelog/109501.yaml b/docs/changelog/109501.yaml new file mode 100644 index 0000000000000..6e81f98816cbf --- /dev/null +++ b/docs/changelog/109501.yaml @@ -0,0 +1,14 @@ +pr: 109501 +summary: Reflect latest changes in synthetic source documentation +area: Mapping +type: enhancement +issues: [] +highlight: + title: Synthetic `_source` improvements + body: |- + There are multiple improvements to synthetic `_source` functionality: + + * Synthetic `_source` is now supported for all field types including `nested` and `object`. `object` fields are supported with `enabled` set to `false`. + + * Synthetic `_source` can be enabled together with `ignore_malformed` and `ignore_above` parameters for all field types that support them. + notable: false diff --git a/docs/reference/data-streams/tsds.asciidoc b/docs/reference/data-streams/tsds.asciidoc index 460048d8ccbc9..de89fa1ca3f31 100644 --- a/docs/reference/data-streams/tsds.asciidoc +++ b/docs/reference/data-streams/tsds.asciidoc @@ -53,8 +53,9 @@ shard segments by `_tsid` and `@timestamp`. documents, the document `_id` is a hash of the document's dimensions and `@timestamp`. A TSDS doesn't support custom document `_id` values. + * A TSDS uses <>, and as a result is -subject to a number of <>. +subject to some <> and <> applied to the `_source` field. NOTE: A time series index can contain fields other than dimensions or metrics. diff --git a/docs/reference/mapping/fields/source-field.asciidoc b/docs/reference/mapping/fields/source-field.asciidoc index ec824e421e015..903b301ab1a96 100644 --- a/docs/reference/mapping/fields/source-field.asciidoc +++ b/docs/reference/mapping/fields/source-field.asciidoc @@ -6,11 +6,11 @@ at index time. The `_source` field itself is not indexed (and thus is not searchable), but it is stored so that it can be returned when executing _fetch_ requests, like <> or <>. -If disk usage is important to you then have a look at -<> which shrinks disk usage at the cost of -only supporting a subset of mappings and slower fetches or (not recommended) -<> which also shrinks disk -usage but disables many features. +If disk usage is important to you, then consider the following options: + +- Using <>, which reconstructs source content at the time of retrieval instead of storing it on disk. This shrinks disk usage, at the cost of slower access to `_source` in <> and <> queries. +- <>. This shrinks disk +usage but disables features that rely on `_source`. include::synthetic-source.asciidoc[] @@ -43,7 +43,7 @@ available then a number of features are not supported: * The <>, <>, and <> APIs. -* In the {kib} link:{kibana-ref}/discover.html[Discover] application, field data will not be displayed. +* In the {kib} link:{kibana-ref}/discover.html[Discover] application, field data will not be displayed. * On the fly <>. diff --git a/docs/reference/mapping/fields/synthetic-source.asciidoc b/docs/reference/mapping/fields/synthetic-source.asciidoc index a0e7aed177a9c..ccea38cf602da 100644 --- a/docs/reference/mapping/fields/synthetic-source.asciidoc +++ b/docs/reference/mapping/fields/synthetic-source.asciidoc @@ -28,45 +28,22 @@ PUT idx While this on the fly reconstruction is *generally* slower than saving the source documents verbatim and loading them at query time, it saves a lot of storage -space. +space. Additional latency can be avoided by not loading `_source` field in queries when it is not needed. 
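As an aside on the last point above, a query can skip `_source` loading explicitly, so a synthetic `_source` never has to be reconstructed for the hits at all. A minimal sketch using `SearchSourceBuilder` from this repository (the match-all query and variable name are placeholders, not part of this patch):

```java
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

// Build a search that does not fetch `_source`; with synthetic source enabled
// this avoids the on-the-fly reconstruction at fetch time entirely.
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
    .query(QueryBuilders.matchAllQuery())
    .fetchSource(false); // same effect as "_source": false in the request body
```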
+ +[[synthetic-source-fields]] +===== Supported fields +Synthetic `_source` is supported by all field types. Depending on implementation details, field types have different properties when used with synthetic `_source`. + +<> construct synthetic `_source` using existing data, most commonly <> and <>. For these field types, no additional space is needed to store the contents of `_source` field. Due to the storage layout of <>, the generated `_source` field undergoes <> compared to original document. + +For all other field types, the original value of the field is stored as is, in the same way as the `_source` field in non-synthetic mode. In this case there are no modifications and field data in `_source` is the same as in the original document. Similarly, malformed values of fields that use <> or <> need to be stored as is. This approach is less storage efficient since data needed for `_source` reconstruction is stored in addition to other data required to index the field (like `doc_values`). [[synthetic-source-restrictions]] ===== Synthetic `_source` restrictions -There are a couple of restrictions to be aware of: +Synthetic `_source` cannot be used together with field mappings that use <>. -* When you retrieve synthetic `_source` content it undergoes minor -<> compared to the original JSON. -* Synthetic `_source` can be used with indices that contain only these field -types: - -** <> -** {plugins}/mapper-annotated-text-usage.html#annotated-text-synthetic-source[`annotated-text`] -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> -** <> +Some field types have additional restrictions. These restrictions are documented in the **synthetic `_source`** section of the field type's <>. [[synthetic-source-modifications]] ===== Synthetic `_source` modifications @@ -178,4 +155,40 @@ that ordering. [[synthetic-source-modifications-ranges]] ====== Representation of ranges -Range field vales (e.g. `long_range`) are always represented as inclusive on both sides with bounds adjusted accordingly. See <>. +Range field values (e.g. `long_range`) are always represented as inclusive on both sides with bounds adjusted accordingly. See <>. + +[[synthetic-source-precision-loss-for-point-types]] +====== Reduced precision of `geo_point` values +Values of `geo_point` fields are represented in synthetic `_source` with reduced precision. See <>. + + +[[synthetic-source-fields-native-list]] +===== Field types that support synthetic source with no storage overhead +The following field types support synthetic source using data from <> or <>, and require no additional storage space to construct the `_source` field. + +NOTE: If you enable the <> or <> settings, then additional storage is required to store ignored field values for these types. + +** <> +** {plugins}/mapper-annotated-text-usage.html#annotated-text-synthetic-source[`annotated-text`] +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> +** <> From 3b5395e31a5c4b8b6f21d9511c0c4660fb2b9ad4 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 4 Jul 2024 19:02:30 +0200 Subject: [PATCH 189/216] Override BufferedInputStream to not sychronize single byte reads in Compressor (#109647) With biased locking gone, we see some slowness in profiling when we use this stream for single byte reads. 
This is a recent regression that is a result of https://openjdk.org/jeps/374. -> the sychronization overhead for bulk reads hardly matters, but since we do quite a few single byte reads lets fix this. --- .../org/elasticsearch/common/compress/Compressor.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/compress/Compressor.java b/server/src/main/java/org/elasticsearch/common/compress/Compressor.java index 239f168306a94..400653a69a9be 100644 --- a/server/src/main/java/org/elasticsearch/common/compress/Compressor.java +++ b/server/src/main/java/org/elasticsearch/common/compress/Compressor.java @@ -26,7 +26,16 @@ public interface Compressor { */ default StreamInput threadLocalStreamInput(InputStream in) throws IOException { // wrap stream in buffer since InputStreamStreamInput doesn't do any buffering itself but does a lot of small reads - return new InputStreamStreamInput(new BufferedInputStream(threadLocalInputStream(in), DeflateCompressor.BUFFER_SIZE)); + return new InputStreamStreamInput(new BufferedInputStream(threadLocalInputStream(in), DeflateCompressor.BUFFER_SIZE) { + @Override + public int read() throws IOException { + // override read to avoid synchronized single byte reads now that JEP374 removed biased locking + if (pos >= count) { + return super.read(); + } + return buf[pos++] & 0xFF; + } + }); } /** From c8ece6a78e8870a3bb1d11ef1d2abd64342d919a Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 4 Jul 2024 12:26:37 -0700 Subject: [PATCH 190/216] Fix esql enrich memory leak (#109275) (#110450) This PR was reviewed in #109275 Block and Vector use a non-thread-safe RefCounted. Threads that increase or decrease the references must have a happen-before relationship. However, this order is not guaranteed in the enrich lookup for the reference of selectedPositions. The driver can complete the MergePositionsOperator, which decreases the reference count of selectedPositions, while the finally block may also decrease it in a separate thread. These actions occur without a defined happen-before relationship. 
Closes #108532 --- .../esql/enrich/EnrichLookupService.java | 164 ++++++++++-------- 1 file changed, 91 insertions(+), 73 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 87c558fe5bd1e..2425fa24b17c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -31,7 +31,6 @@ import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.OrdinalBytesRefBlock; @@ -43,6 +42,7 @@ import org.elasticsearch.compute.operator.OutputOperator; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.MappedFieldType; @@ -247,30 +247,53 @@ private void doLookup( ActionListener listener ) { Block inputBlock = inputPage.getBlock(0); - final IntBlock selectedPositions; - final OrdinalBytesRefBlock ordinalsBytesRefBlock; - if (inputBlock instanceof BytesRefBlock bytesRefBlock && (ordinalsBytesRefBlock = bytesRefBlock.asOrdinals()) != null) { - inputBlock = ordinalsBytesRefBlock.getDictionaryVector().asBlock(); - selectedPositions = ordinalsBytesRefBlock.getOrdinalsBlock(); - selectedPositions.mustIncRef(); - } else { - selectedPositions = IntVector.range(0, inputBlock.getPositionCount(), blockFactory).asBlock(); + if (inputBlock.areAllValuesNull()) { + listener.onResponse(createNullResponse(inputPage.getPositionCount(), extractFields)); + return; } - LocalCircuitBreaker localBreaker = null; + final List releasables = new ArrayList<>(6); + boolean started = false; try { - if (inputBlock.areAllValuesNull()) { - listener.onResponse(createNullResponse(inputPage.getPositionCount(), extractFields)); - return; - } - ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shardId, 0, AliasFilter.EMPTY); - SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, SearchService.NO_TIMEOUT); - listener = ActionListener.runBefore(listener, searchContext::close); - localBreaker = new LocalCircuitBreaker( + final ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shardId, 0, AliasFilter.EMPTY); + final SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, SearchService.NO_TIMEOUT); + releasables.add(searchContext); + final LocalCircuitBreaker localBreaker = new LocalCircuitBreaker( blockFactory.breaker(), localBreakerSettings.overReservedBytes(), localBreakerSettings.maxOverReservedBytes() ); - DriverContext driverContext = new DriverContext(bigArrays, blockFactory.newChildFactory(localBreaker)); + releasables.add(localBreaker); + final DriverContext driverContext = new DriverContext(bigArrays, blockFactory.newChildFactory(localBreaker)); + final ElementType[] mergingTypes = new ElementType[extractFields.size()]; + for (int i = 0; i < extractFields.size(); i++) { + mergingTypes[i] = 
PlannerUtils.toElementType(extractFields.get(i).dataType()); + } + final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 2).toArray(); + final MergePositionsOperator mergePositionsOperator; + final OrdinalBytesRefBlock ordinalsBytesRefBlock; + if (inputBlock instanceof BytesRefBlock bytesRefBlock && (ordinalsBytesRefBlock = bytesRefBlock.asOrdinals()) != null) { + inputBlock = ordinalsBytesRefBlock.getDictionaryVector().asBlock(); + var selectedPositions = ordinalsBytesRefBlock.getOrdinalsBlock(); + mergePositionsOperator = new MergePositionsOperator( + 1, + mergingChannels, + mergingTypes, + selectedPositions, + driverContext.blockFactory() + ); + + } else { + try (var selectedPositions = IntVector.range(0, inputBlock.getPositionCount(), blockFactory).asBlock()) { + mergePositionsOperator = new MergePositionsOperator( + 1, + mergingChannels, + mergingTypes, + selectedPositions, + driverContext.blockFactory() + ); + } + } + releasables.add(mergePositionsOperator); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); MappedFieldType fieldType = searchExecutionContext.getFieldType(matchField); var queryList = switch (matchType) { @@ -284,57 +307,13 @@ private void doLookup( queryList, searchExecutionContext.getIndexReader() ); - List intermediateOperators = new ArrayList<>(extractFields.size() + 2); - final ElementType[] mergingTypes = new ElementType[extractFields.size()]; - // load the fields - List fields = new ArrayList<>(extractFields.size()); - for (int i = 0; i < extractFields.size(); i++) { - NamedExpression extractField = extractFields.get(i); - final ElementType elementType = PlannerUtils.toElementType(extractField.dataType()); - mergingTypes[i] = elementType; - EsPhysicalOperationProviders.ShardContext ctx = new EsPhysicalOperationProviders.DefaultShardContext( - 0, - searchContext.getSearchExecutionContext(), - searchContext.request().getAliasFilter() - ); - BlockLoader loader = ctx.blockLoader( - extractField instanceof Alias a ? 
((NamedExpression) a.child()).name() : extractField.name(), - extractField.dataType() == DataType.UNSUPPORTED, - MappedFieldType.FieldExtractPreference.NONE - ); - fields.add( - new ValuesSourceReaderOperator.FieldInfo( - extractField.name(), - PlannerUtils.toElementType(extractField.dataType()), - shardIdx -> { - if (shardIdx != 0) { - throw new IllegalStateException("only one shard"); - } - return loader; - } - ) - ); - } - intermediateOperators.add( - new ValuesSourceReaderOperator( - driverContext.blockFactory(), - fields, - List.of( - new ValuesSourceReaderOperator.ShardContext( - searchContext.searcher().getIndexReader(), - searchContext::newSourceLoader - ) - ), - 0 - ) - ); - // merging field-values by position - final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 2).toArray(); - intermediateOperators.add( - new MergePositionsOperator(1, mergingChannels, mergingTypes, selectedPositions, driverContext.blockFactory()) - ); + releasables.add(queryOperator); + var extractFieldsOperator = extractFieldsOperator(searchContext, driverContext, extractFields); + releasables.add(extractFieldsOperator); + AtomicReference result = new AtomicReference<>(); OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); + releasables.add(outputOperator); Driver driver = new Driver( "enrich-lookup:" + sessionId, System.currentTimeMillis(), @@ -350,18 +329,16 @@ private void doLookup( inputPage.getPositionCount() ), queryOperator, - intermediateOperators, + List.of(extractFieldsOperator, mergePositionsOperator), outputOperator, Driver.DEFAULT_STATUS_INTERVAL, - localBreaker + Releasables.wrap(searchContext, localBreaker) ); task.addListener(() -> { String reason = Objects.requireNonNullElse(task.getReasonCancelled(), "task was cancelled"); driver.cancel(reason); }); - var threadContext = transportService.getThreadPool().getThreadContext(); - localBreaker = null; Driver.start(threadContext, executor, driver, Driver.DEFAULT_MAX_ITERATIONS, listener.map(ignored -> { Page out = result.get(); if (out == null) { @@ -369,11 +346,52 @@ private void doLookup( } return out; })); + started = true; } catch (Exception e) { listener.onFailure(e); } finally { - Releasables.close(selectedPositions, localBreaker); + if (started == false) { + Releasables.close(releasables); + } + } + } + + private static Operator extractFieldsOperator( + SearchContext searchContext, + DriverContext driverContext, + List extractFields + ) { + EsPhysicalOperationProviders.ShardContext shardContext = new EsPhysicalOperationProviders.DefaultShardContext( + 0, + searchContext.getSearchExecutionContext(), + searchContext.request().getAliasFilter() + ); + List fields = new ArrayList<>(extractFields.size()); + for (NamedExpression extractField : extractFields) { + BlockLoader loader = shardContext.blockLoader( + extractField instanceof Alias a ? 
((NamedExpression) a.child()).name() : extractField.name(), + extractField.dataType() == DataType.UNSUPPORTED, + MappedFieldType.FieldExtractPreference.NONE + ); + fields.add( + new ValuesSourceReaderOperator.FieldInfo( + extractField.name(), + PlannerUtils.toElementType(extractField.dataType()), + shardIdx -> { + if (shardIdx != 0) { + throw new IllegalStateException("only one shard"); + } + return loader; + } + ) + ); } + return new ValuesSourceReaderOperator( + driverContext.blockFactory(), + fields, + List.of(new ValuesSourceReaderOperator.ShardContext(searchContext.searcher().getIndexReader(), searchContext::newSourceLoader)), + 0 + ); } private Page createNullResponse(int positionCount, List extractFields) { From 2b1d8802c064e5eed6c4e0a468694e84db651aa6 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Fri, 5 Jul 2024 11:51:20 +0300 Subject: [PATCH 191/216] Wait for the logs templates to be initialised 5 sec longer (#110495) We have witnessed some test failures in `DataStreamUpgradeRestIT`, `EcsLogsDataStreamIT` and `LogsDataStreamIT` during which the `logs` related index or component template gets initialised right after the 10 seconds have passed. We increase the timeout to make the test more resilient to this scenario. --- .../org/elasticsearch/datastreams/AbstractDataStreamIT.java | 3 ++- .../org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java index ca33f08324539..027ac7c736c8a 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; /** * This base class provides the boilerplate to simplify the development of integration tests. 
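The hunks that follow all apply one pattern: `assertBusy` calls that previously relied on the default 10-second wait get an explicit 15-second bound. Sketched in isolation (`templateExists()` is a hypothetical stand-in for the polled condition):

```java
import java.util.concurrent.TimeUnit;

// Poll the assertion for up to 15 seconds instead of the default 10.
assertBusy(() -> {
    // hypothetical check; the real tests fetch the index/component template
    assertTrue(templateExists());
}, 15, TimeUnit.SECONDS);
```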
@@ -53,7 +54,7 @@ static void waitForIndexTemplate(RestClient client, String indexTemplate) throws } catch (ResponseException e) { fail(e.getMessage()); } - }); + }, 15, TimeUnit.SECONDS); } static void createDataStream(RestClient client, String name) throws IOException { diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java index f447e5b80f8c8..39cdf77d04810 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; @@ -306,6 +307,6 @@ private void waitForLogsComponentTemplateInitialization() throws Exception { // Throw the exception, if it was an error we did not anticipate throw responseException; } - }); + }, 15, TimeUnit.SECONDS); } } From 35efffde91c6bc8a740290907a63f4f39d30c2b2 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Fri, 5 Jul 2024 10:52:17 +0200 Subject: [PATCH 192/216] Fix testRelocationFailureNotRetriedForever (#109855) --- .../indices/IndicesLifecycleListenerIT.java | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java index b224d70eed8f8..e9e88a2d6b76c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedRunnable; @@ -127,7 +128,7 @@ public void beforeIndexCreated(Index index, Settings indexSettings) { assertThat(state.nodes().get(shard.currentNodeId()).getName(), equalTo(node1)); } - public void testRelocationFailureNotRetriedForever() { + public void testRelocationFailureNotRetriedForever() throws Exception { String node1 = internalCluster().startNode(); createIndex("index1", 1, 0); ensureGreen("index1"); @@ -143,6 +144,16 @@ public void beforeIndexCreated(Index index, Settings indexSettings) { updateIndexSettings(Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "._name", node1), "index1"); ensureGreen("index1"); + var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + + // await all relocation attempts are exhausted + assertBusy(() -> { + var state = clusterAdmin().prepareState().get().getState(); + var shard = state.routingTable().index("index1").shard(0).primaryShard(); + assertThat(shard, notNullValue()); + assertThat(shard.relocationFailureInfo().failedRelocations(), equalTo(maxAttempts)); + }); + // ensure the 
shard remain started var state = clusterAdmin().prepareState().get().getState(); logger.info("Final routing is {}", state.getRoutingNodes().toString()); var shard = state.routingTable().index("index1").shard(0).primaryShard(); From 1274a390b4a39058f4826b9d51c86ea57954a646 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Fri, 5 Jul 2024 11:06:27 +0200 Subject: [PATCH 193/216] Always write empty role descriptor fields to index (#110424) * Always write empty role descriptor fields to index --- muted-tests.yml | 6 - .../action/role/QueryRoleResponse.java | 2 +- .../core/security/authz/RoleDescriptor.java | 17 +-- .../security/role/BulkPutRoleRestIT.java | 110 +++++++++++++++++- .../authz/store/NativeRolesStore.java | 40 +++++-- .../authz/store/NativeRolesStoreTests.java | 60 +++++++++- 6 files changed, 203 insertions(+), 32 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d8eba8ad2dba6..91f38f3a5ba46 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -109,12 +109,6 @@ tests: - class: "org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests" issue: "https://github.com/elastic/elasticsearch/issues/110408" method: "testCreateAndRestorePartialSearchableSnapshot" -- class: "org.elasticsearch.xpack.security.role.RoleWithDescriptionRestIT" - issue: "https://github.com/elastic/elasticsearch/issues/110416" - method: "testCreateOrUpdateRoleWithDescription" -- class: "org.elasticsearch.xpack.security.role.RoleWithDescriptionRestIT" - issue: "https://github.com/elastic/elasticsearch/issues/110417" - method: "testCreateOrUpdateRoleWithDescription" - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT method: test {p0=search.vectors/41_knn_search_half_byte_quantized/Test create, merge, and search cosine} issue: https://github.com/elastic/elasticsearch/issues/109978 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java index 6bdc6c66c1835..8e9da10e449ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/QueryRoleResponse.java @@ -86,7 +86,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws // other details of the role descriptor (in the same object). 
assert Strings.isNullOrEmpty(roleDescriptor.getName()) == false; builder.field("name", roleDescriptor.getName()); - roleDescriptor.innerToXContent(builder, params, false, false); + roleDescriptor.innerToXContent(builder, params, false); if (sortValues != null && sortValues.length > 0) { builder.array("_sort", sortValues); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index 7bedab61bd43d..1a8839fa0fa4a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -417,13 +417,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation) throws IOException { - return toXContent(builder, params, docCreation, false); - } - - public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean docCreation, boolean includeMetadataFlattened) - throws IOException { builder.startObject(); - innerToXContent(builder, params, docCreation, includeMetadataFlattened); + innerToXContent(builder, params, docCreation); return builder.endObject(); } @@ -435,12 +430,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, boolea * @param docCreation {@code true} if the x-content is being generated for creating a document * in the security index, {@code false} if the x-content being generated * is for API display purposes - * @param includeMetadataFlattened {@code true} if the metadataFlattened field should be included in doc * @return x-content builder * @throws IOException if there was an error writing the x-content to the builder */ - public XContentBuilder innerToXContent(XContentBuilder builder, Params params, boolean docCreation, boolean includeMetadataFlattened) - throws IOException { + public XContentBuilder innerToXContent(XContentBuilder builder, Params params, boolean docCreation) throws IOException { builder.array(Fields.CLUSTER.getPreferredName(), clusterPrivileges); if (configurableClusterPrivileges.length != 0) { builder.field(Fields.GLOBAL.getPreferredName()); @@ -452,9 +445,7 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params, b builder.array(Fields.RUN_AS.getPreferredName(), runAs); } builder.field(Fields.METADATA.getPreferredName(), metadata); - if (includeMetadataFlattened) { - builder.field(Fields.METADATA_FLATTENED.getPreferredName(), metadata); - } + if (docCreation) { builder.field(Fields.TYPE.getPreferredName(), ROLE_TYPE); } else { @@ -1196,7 +1187,7 @@ private static ApplicationResourcePrivileges parseApplicationPrivilege(String ro public static final class RemoteIndicesPrivileges implements Writeable, ToXContentObject { - private static final RemoteIndicesPrivileges[] NONE = new RemoteIndicesPrivileges[0]; + public static final RemoteIndicesPrivileges[] NONE = new RemoteIndicesPrivileges[0]; private final IndicesPrivileges indicesPrivileges; private final String[] remoteClusters; diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java index 0297abad7a508..88b952f33394e 100644 --- 
a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/role/BulkPutRoleRestIT.java @@ -181,15 +181,74 @@ public void testPutNoValidRoles() throws Exception { public void testBulkUpdates() throws Exception { String request = """ {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test2": - {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test3": - {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["write"]}]}}}"""; - + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}], "description": "something"}, "test3": + {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["write"]}], "remote_indices":[{"names":["logs-*"], + "privileges":["read"],"clusters":["my_cluster*","other_cluster"]}]}}}"""; { Map responseMap = upsertRoles(request); assertThat(responseMap, not(hasKey("errors"))); List> items = (List>) responseMap.get("created"); assertEquals(3, items.size()); + + fetchRoleAndAssertEqualsExpected( + "test1", + new RoleDescriptor( + "test1", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); + fetchRoleAndAssertEqualsExpected( + "test2", + new RoleDescriptor( + "test2", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("read").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + "something" + ) + ); + fetchRoleAndAssertEqualsExpected( + "test3", + new RoleDescriptor( + "test3", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("write").build() }, + null, + null, + null, + null, + null, + new RoleDescriptor.RemoteIndicesPrivileges[] { + RoleDescriptor.RemoteIndicesPrivileges.builder("my_cluster*", "other_cluster") + .indices("logs-*") + .privileges("read") + .build() }, + null, + null, + null + ) + ); } { Map responseMap = upsertRoles(request); @@ -200,7 +259,7 @@ public void testBulkUpdates() throws Exception { } { request = """ - {"roles": {"test1": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["read"]}]}, "test2": + {"roles": {"test1": {}, "test2": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}, "test3": {"cluster": ["all"],"indices": [{"names": ["*"],"privileges": ["all"]}]}}}"""; @@ -208,6 +267,49 @@ public void testBulkUpdates() throws Exception { assertThat(responseMap, not(hasKey("errors"))); List> items = (List>) responseMap.get("updated"); assertEquals(3, items.size()); + + assertThat(responseMap, not(hasKey("errors"))); + + fetchRoleAndAssertEqualsExpected( + "test1", + new RoleDescriptor("test1", null, null, null, null, null, null, null, null, null, null, null) + ); + fetchRoleAndAssertEqualsExpected( + "test2", + new RoleDescriptor( + "test2", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); + fetchRoleAndAssertEqualsExpected( + "test3", + new RoleDescriptor( + "test3", + new String[] { "all" }, + new 
RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ) + ); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index adeada6cbf6cf..a2d2b21b489ea 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -59,6 +59,7 @@ import org.elasticsearch.xpack.core.security.action.role.RoleDescriptorRequestValidator; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; import org.elasticsearch.xpack.core.security.authz.support.DLSRoleQueryValidator; import org.elasticsearch.xpack.core.security.support.NativeRealmValidationUtil; @@ -607,16 +608,41 @@ private DeleteRequest createRoleDeleteRequest(final String roleName) { return client.prepareDelete(SECURITY_MAIN_ALIAS, getIdForRole(roleName)).request(); } - private XContentBuilder createRoleXContentBuilder(RoleDescriptor role) throws IOException { + // Package private for testing + XContentBuilder createRoleXContentBuilder(RoleDescriptor role) throws IOException { assert NativeRealmValidationUtil.validateRoleName(role.getName(), false) == null : "Role name was invalid or reserved: " + role.getName(); assert false == role.hasRestriction() : "restriction is not supported for native roles"; - return role.toXContent( - jsonBuilder(), - ToXContent.EMPTY_PARAMS, - true, - featureService.clusterHasFeature(clusterService.state(), SECURITY_ROLES_METADATA_FLATTENED) - ); + + XContentBuilder builder = jsonBuilder().startObject(); + role.innerToXContent(builder, ToXContent.EMPTY_PARAMS, true); + + if (featureService.clusterHasFeature(clusterService.state(), SECURITY_ROLES_METADATA_FLATTENED)) { + builder.field(RoleDescriptor.Fields.METADATA_FLATTENED.getPreferredName(), role.getMetadata()); + } + + // When role descriptor XContent is generated for the security index all empty fields need to have default values to make sure + // existing values are overwritten if not present since the request to update could be an UpdateRequest + // (update provided fields in existing document or create document) or IndexRequest (replace and reindex document) + if (role.hasConfigurableClusterPrivileges() == false) { + builder.startObject(RoleDescriptor.Fields.GLOBAL.getPreferredName()).endObject(); + } + + if (role.hasRemoteIndicesPrivileges() == false) { + builder.field(RoleDescriptor.Fields.REMOTE_INDICES.getPreferredName(), RoleDescriptor.RemoteIndicesPrivileges.NONE); + } + + if (role.hasRemoteClusterPermissions() == false + && clusterService.state().getMinTransportVersion().onOrAfter(ROLE_REMOTE_CLUSTER_PRIVS)) { + builder.array(RoleDescriptor.Fields.REMOTE_CLUSTER.getPreferredName(), RemoteClusterPermissions.NONE); + } + if (role.hasDescription() == false + && clusterService.state().getMinTransportVersion().onOrAfter(TransportVersions.SECURITY_ROLE_DESCRIPTION)) { + 
builder.field(RoleDescriptor.Fields.DESCRIPTION.getPreferredName(), ""); + } + + builder.endObject(); + return builder; } public void usageStats(ActionListener> listener) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index a4ee449438fe0..bfa358d0b7d6e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -55,6 +55,7 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -78,6 +79,7 @@ import org.mockito.Mockito; import java.io.IOException; +import java.lang.reflect.Field; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; @@ -138,7 +140,7 @@ private NativeRolesStore createRoleStoreForTest() { private NativeRolesStore createRoleStoreForTest(Settings settings) { new ReservedRolesStore(Set.of("superuser")); - final ClusterService clusterService = mock(ClusterService.class); + final ClusterService clusterService = mockClusterServiceWithMinNodeVersion(TransportVersion.current()); final SecuritySystemIndices systemIndices = new SecuritySystemIndices(settings); final FeatureService featureService = mock(FeatureService.class); systemIndices.init(client, featureService, clusterService); @@ -807,6 +809,62 @@ public void testBulkDeleteReservedRole() { verify(client, times(0)).bulk(any(BulkRequest.class), any()); } + /** + * Make sure all top level fields for a RoleDescriptor have default values to make sure they can be set to empty in an upsert + * call to the roles API + */ + public void testAllTopFieldsHaveEmptyDefaultsForUpsert() throws IOException, IllegalAccessException { + final NativeRolesStore rolesStore = createRoleStoreForTest(); + RoleDescriptor allNullDescriptor = new RoleDescriptor( + "all-null-descriptor", + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); + + Set fieldsWithoutDefaultValue = Set.of( + RoleDescriptor.Fields.INDEX, + RoleDescriptor.Fields.NAMES, + RoleDescriptor.Fields.ALLOW_RESTRICTED_INDICES, + RoleDescriptor.Fields.RESOURCES, + RoleDescriptor.Fields.QUERY, + RoleDescriptor.Fields.PRIVILEGES, + RoleDescriptor.Fields.CLUSTERS, + RoleDescriptor.Fields.APPLICATION, + RoleDescriptor.Fields.FIELD_PERMISSIONS, + RoleDescriptor.Fields.FIELD_PERMISSIONS_2X, + RoleDescriptor.Fields.GRANT_FIELDS, + RoleDescriptor.Fields.EXCEPT_FIELDS, + RoleDescriptor.Fields.METADATA_FLATTENED, + RoleDescriptor.Fields.TRANSIENT_METADATA, + RoleDescriptor.Fields.RESTRICTION, + RoleDescriptor.Fields.WORKFLOWS + ); + + String serializedOutput = Strings.toString(rolesStore.createRoleXContentBuilder(allNullDescriptor)); + Field[] fields = RoleDescriptor.Fields.class.getFields(); + + for (Field field : fields) { + ParseField fieldValue = (ParseField) field.get(null); + if (fieldsWithoutDefaultValue.contains(fieldValue) == false) { + assertThat( + "New RoleDescriptor field without a default value detected. 
" + + "Set a value or add to excluded list if not expected to be set to empty through role APIs", + serializedOutput, + containsString(fieldValue.getPreferredName()) + ); + } + } + } + private ClusterService mockClusterServiceWithMinNodeVersion(TransportVersion transportVersion) { final ClusterService clusterService = mock(ClusterService.class, Mockito.RETURNS_DEEP_STUBS); when(clusterService.state().getMinTransportVersion()).thenReturn(transportVersion); From 747fa59a2cfea844a31287022f4657504e7f0864 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 5 Jul 2024 12:46:48 +0300 Subject: [PATCH 194/216] DOCS Query Roles (#110473) These are the docs changes in relation to https://github.com/elastic/elasticsearch/pull/108733 --- docs/reference/rest-api/security.asciidoc | 2 + .../rest-api/security/get-roles.asciidoc | 5 +- .../rest-api/security/query-role.asciidoc | 283 ++++++++++++++++++ .../rest-api/security/query-user.asciidoc | 19 +- .../authorization/managing-roles.asciidoc | 14 +- 5 files changed, 311 insertions(+), 12 deletions(-) create mode 100644 docs/reference/rest-api/security/query-role.asciidoc diff --git a/docs/reference/rest-api/security.asciidoc b/docs/reference/rest-api/security.asciidoc index 04cd838c45600..82cf38e52bd80 100644 --- a/docs/reference/rest-api/security.asciidoc +++ b/docs/reference/rest-api/security.asciidoc @@ -50,6 +50,7 @@ Use the following APIs to add, remove, update, and retrieve roles in the native * <> * <> * <> +* <> [discrete] [[security-token-apis]] @@ -192,6 +193,7 @@ include::security/get-app-privileges.asciidoc[] include::security/get-builtin-privileges.asciidoc[] include::security/get-role-mappings.asciidoc[] include::security/get-roles.asciidoc[] +include::security/query-role.asciidoc[] include::security/get-service-accounts.asciidoc[] include::security/get-service-credentials.asciidoc[] include::security/get-settings.asciidoc[] diff --git a/docs/reference/rest-api/security/get-roles.asciidoc b/docs/reference/rest-api/security/get-roles.asciidoc index 3eb5a735194c6..3cc2f95c6ea7e 100644 --- a/docs/reference/rest-api/security/get-roles.asciidoc +++ b/docs/reference/rest-api/security/get-roles.asciidoc @@ -38,7 +38,10 @@ API cannot retrieve roles that are defined in roles files. ==== {api-response-body-title} A successful call returns an array of roles with the JSON representation of the -role. +role. The returned role format is a simple extension of the <> format, +only adding an extra field `transient_metadata.enabled`. +This field is `false` in case the role is automatically disabled, for example when the license +level does not allow some permissions that the role grants. [[security-api-get-role-response-codes]] ==== {api-response-codes-title} diff --git a/docs/reference/rest-api/security/query-role.asciidoc b/docs/reference/rest-api/security/query-role.asciidoc new file mode 100644 index 0000000000000..937bd263140fc --- /dev/null +++ b/docs/reference/rest-api/security/query-role.asciidoc @@ -0,0 +1,283 @@ +[role="xpack"] +[[security-api-query-role]] +=== Query Role API + +++++ +Query Role +++++ + +Retrieves roles with <> in a <> fashion. + +[[security-api-query-role-request]] +==== {api-request-title} + +`GET /_security/_query/role` + +`POST /_security/_query/role` + +[[security-api-query-role-prereqs]] +==== {api-prereq-title} + +* To use this API, you must have at least the `read_security` cluster privilege. 
+
+[[security-api-query-role-desc]]
+==== {api-description-title}
+
+The role management APIs are generally the preferred way to manage roles, rather than using
+<>.
+The query roles API does not retrieve roles that are defined in roles files, nor <> ones.
+You can optionally filter the results with a query. Also, the results can be paginated and sorted.
+
+[[security-api-query-role-request-body]]
+==== {api-request-body-title}
+
+You can specify the following parameters in the request body:
+
+`query`::
+(Optional, string) A <> to filter which roles to return.
+The query supports a subset of query types, including
+<>, <>,
+<>, <>,
+<>, <>,
+<>, <>,
+<>, <>,
+and <>.
++
+You can query the following values associated with a role.
++
+.Valid values for `query`
+[%collapsible%open]
+====
+`name`::
+(keyword) The <> of the role.
+
+`description`::
+(text) The <> of the role.
+
+`metadata`::
+(flattened) Metadata field associated with the <>, such as `metadata.app_tag`.
+Note that metadata is internally indexed as a <> field type.
+This means that all sub-fields act like `keyword` fields when querying and sorting.
+It also implies that it is not possible to refer to a subset of metadata fields using wildcard patterns,
+e.g. `metadata.field*`, even for query types that support field name patterns.
+Lastly, all the metadata fields can be searched together when simply mentioning the
+`metadata` field (i.e. not followed by any dot and sub-field name).
+
+`applications`::
+The list of <> that the role grants.
+
+`application`:::
+(keyword) The name of the application associated with the privileges and resources.
+
+`privileges`:::
+(keyword) The names of the privileges that the role grants.
+
+`resources`:::
+(keyword) The resources to which the privileges apply.
+
+====
+
+include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from]
++
+By default, you cannot page through more than 10,000 hits using the `from` and
+`size` parameters. To page through more hits, use the
+<> parameter.
+
+`size`::
+(Optional, integer) The number of hits to return. Must not be negative and defaults to `10`.
++
+By default, you cannot page through more than 10,000 hits using the `from` and
+`size` parameters. To page through more hits, use the
+<> parameter.
+
+`sort`::
+(Optional, object) <>. You can sort on the role `name`, as in the example below.
+In addition, sort can also be applied to the `_doc` field to sort by index order.
+
+`search_after`::
+(Optional, array) <> definition.
+
+
+[[security-api-query-role-response-body]]
+==== {api-response-body-title}
+
+This API returns the following top level fields:
+
+`total`::
+The total number of roles found.
+
+`count`::
+The number of roles returned in the response.
+
+`roles`::
+A list of roles that match the query.
+The returned role format is an extension of the <> format.
+It adds the `transient_metadata.enabled` and the `_sort` fields.
+`transient_metadata.enabled` is set to `false` in case the role is automatically disabled,
+for example when the role grants privileges that are not allowed by the installed license.
+`_sort` is present when the search query sorts on some field.
+It contains the array of values that have been used for sorting.
+
+[[security-api-query-role-example]]
+==== {api-examples-title}
+
+The following request lists all roles, sorted by the role name:
+
+[source,console]
+----
+POST /_security/_query/role
+{
+  "sort": ["name"]
+}
+----
+// TEST[setup:admin_role,user_role]
+
+A successful call returns a JSON structure that contains the information
+retrieved for one or more roles:
+
+[source,console-result]
+----
+{
+    "total": 2,
+    "count": 2,
+    "roles": [ <1>
+        {
+            "name" : "my_admin_role",
+            "cluster" : [
+                "all"
+            ],
+            "indices" : [
+                {
+                    "names" : [
+                        "index1",
+                        "index2"
+                    ],
+                    "privileges" : [
+                        "all"
+                    ],
+                    "field_security" : {
+                        "grant" : [
+                            "title",
+                            "body"
+                        ]
+                    },
+                    "allow_restricted_indices" : false
+                }
+            ],
+            "applications" : [ ],
+            "run_as" : [
+                "other_user"
+            ],
+            "metadata" : {
+                "version" : 1
+            },
+            "transient_metadata" : {
+                "enabled" : true
+            },
+            "description" : "Grants full access to all management features within the cluster.",
+            "_sort" : [
+                "my_admin_role"
+            ]
+        },
+        {
+            "name" : "my_user_role",
+            "cluster" : [ ],
+            "indices" : [
+                {
+                    "names" : [
+                        "index1",
+                        "index2"
+                    ],
+                    "privileges" : [
+                        "all"
+                    ],
+                    "field_security" : {
+                        "grant" : [
+                            "title",
+                            "body"
+                        ]
+                    },
+                    "allow_restricted_indices" : false
+                }
+            ],
+            "applications" : [ ],
+            "run_as" : [ ],
+            "metadata" : {
+                "version" : 1
+            },
+            "transient_metadata" : {
+                "enabled" : true
+            },
+            "description" : "Grants user access to some indices.",
+            "_sort" : [
+                "my_user_role"
+            ]
+        }
+    ]
+}
+----
+// TEST[continued]
+
+<1> The list of roles that were retrieved for this request
+
+Similarly, the following request can be used to query only the user access role,
+given its description:
+
+[source,console]
+----
+POST /_security/_query/role
+{
+  "query": {
+    "match": {
+      "description": {
+        "query": "user access"
+      }
+    }
+  },
+  "size": 1 <1>
+}
+----
+// TEST[continued]
+
+<1> Return only the best matching role
+
+[source,console-result]
+----
+{
+    "total": 2,
+    "count": 1,
+    "roles": [
+        {
+            "name" : "my_user_role",
+            "cluster" : [ ],
+            "indices" : [
+                {
+                    "names" : [
+                        "index1",
+                        "index2"
+                    ],
+                    "privileges" : [
+                        "all"
+                    ],
+                    "field_security" : {
+                        "grant" : [
+                            "title",
+                            "body"
+                        ]
+                    },
+                    "allow_restricted_indices" : false
+                }
+            ],
+            "applications" : [ ],
+            "run_as" : [ ],
+            "metadata" : {
+                "version" : 1
+            },
+            "transient_metadata" : {
+                "enabled" : true
+            },
+            "description" : "Grants user access to some indices."
+        }
+    ]
+}
+----
diff --git a/docs/reference/rest-api/security/query-user.asciidoc b/docs/reference/rest-api/security/query-user.asciidoc
index 952e0f40f2a3a..23852f0f2eed7 100644
--- a/docs/reference/rest-api/security/query-user.asciidoc
+++ b/docs/reference/rest-api/security/query-user.asciidoc
@@ -66,13 +66,6 @@ The email of the user.
 Specifies whether the user is enabled.
 ====
 
-[[security-api-query-user-query-params]]
-==== {api-query-parms-title}
-
-`with_profile_uid`::
-(Optional, boolean) Determines whether to retrieve the <> `uid`,
-if exists, for the users. Defaults to `false`.
-
 include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=from]
 +
 By default, you cannot page through more than 10,000 hits using the `from` and
@@ -93,6 +86,12 @@ In addition, sort can also be applied to the `_doc` field to sort by index order
 `search_after`::
 (Optional, array) <> definition.
 
+[[security-api-query-user-query-params]]
+==== {api-query-parms-title}
+
+`with_profile_uid`::
+(Optional, boolean) Determines whether to retrieve the <> `uid`,
+if it exists, for the users. Defaults to `false`.
[[security-api-query-user-response-body]]
==== {api-response-body-title}
 
@@ -191,7 +190,7 @@ Use the user information to retrieve the user with a query:
 
 [source,console]
 ----
-GET /_security/_query/user
+POST /_security/_query/user
 {
   "query": {
     "prefix": {
@@ -231,7 +230,7 @@ To retrieve the user `profile_uid` as part of the response:
 
 [source,console]
 --------------------------------------------------
-GET /_security/_query/user?with_profile_uid=true
+POST /_security/_query/user?with_profile_uid=true
 {
   "query": {
     "prefix": {
@@ -272,7 +271,7 @@ Use a `bool` query to issue complex logical conditions and use
 
 [source,js]
 ----
-GET /_security/_query/user
+POST /_security/_query/user
 {
   "query": {
     "bool": {
diff --git a/docs/reference/security/authorization/managing-roles.asciidoc b/docs/reference/security/authorization/managing-roles.asciidoc
index 253aa33822234..535d70cbc5e9c 100644
--- a/docs/reference/security/authorization/managing-roles.asciidoc
+++ b/docs/reference/security/authorization/managing-roles.asciidoc
@@ -13,7 +13,9 @@ A role is defined by the following JSON structure:
   "indices": [ ... ], <4>
   "applications": [ ... ], <5>
   "remote_indices": [ ... ], <6>
-  "remote_cluster": [ ... ] <7>
+  "remote_cluster": [ ... ], <7>
+  "metadata": { ... }, <8>
+  "description": "..." <9>
 }
-----
// NOTCONSOLE
@@ -40,6 +42,16 @@ A role is defined by the following JSON structure:
    <>
    This field is optional (missing `remote_cluster` privileges effectively means
    no additional cluster permissions for any API key based remote clusters).
+<8> Metadata field associated with the role, such as `metadata.app_tag`.
+    Metadata is internally indexed as a <> field type.
+    This means that all sub-fields act like `keyword` fields when querying and sorting.
+    Metadata values can be simple values, as well as lists and maps.
+    This field is optional.
+<9> A string value with the description text of the role.
+    Its maximum length is `1000` characters.
+    The field is internally indexed as a <> field type
+    (with default values for all parameters).
+    This field is optional.
 
 [[valid-role-name]]
 NOTE: Role names must be at least 1 and no more than 507 characters. They can

From 5d791d4e278977bdd9113c58c25aaea54318d869 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Fri, 5 Jul 2024 12:06:46 +0200
Subject: [PATCH 195/216] Slightly adjust wording around potential savings
 mentioned in the description of the index.codec setting (#110468)

---
 docs/reference/index-modules.asciidoc | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc
index 04bebfae2763b..24149afe802a2 100644
--- a/docs/reference/index-modules.asciidoc
+++ b/docs/reference/index-modules.asciidoc
@@ -81,8 +81,9 @@ breaking change]. If you are updating the compression type, the new one will be
    applied after segments are merged. Segment merging can be forced using
    <>. Experiments with indexing log datasets
-   have shown that `best_compression` gives up to ~18% lower storage usage
-   compared to `default` while only minimally affecting indexing throughput (~2%).
+   have shown that `best_compression` gives up to ~18% lower storage usage in
+   the most ideal scenario compared to `default` while only minimally affecting
+   indexing throughput (~2%).
[[index-mode-setting]] `index.mode`:: + From d7d86b4da59688457760ef01ead4c11c1275754d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Fri, 5 Jul 2024 14:03:53 +0200 Subject: [PATCH 196/216] Add audit logging for bulk role APIs (#110410) * Add audit logging for bulk put role --- .../role/BulkPutRoleRequestBuilder.java | 2 +- .../action/role/BulkPutRolesRequest.java | 4 +- .../audit/logfile/LoggingAuditTrail.java | 47 +++++++--- .../audit/logfile/LoggingAuditTrailTests.java | 88 ++++++++++++++----- 4 files changed, 106 insertions(+), 35 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java index ba199e183d4af..cda45a67e81c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/BulkPutRoleRequestBuilder.java @@ -44,7 +44,7 @@ public class BulkPutRoleRequestBuilder extends ActionRequestBuilder roles; - public BulkPutRolesRequest() {} + public BulkPutRolesRequest(List roles) { + this.roles = roles; + } public void setRoles(List roles) { this.roles = roles; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index 01104806c4a1c..bc5cc4a5e6b3f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -44,6 +44,7 @@ import org.elasticsearch.xcontent.json.JsonStringEncoder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.security.SecurityContext; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.Grant; import org.elasticsearch.xpack.core.security.action.apikey.AbstractCreateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.BaseSingleUpdateApiKeyRequest; @@ -72,6 +73,8 @@ import org.elasticsearch.xpack.core.security.action.profile.SetProfileEnabledRequest; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; +import org.elasticsearch.xpack.core.security.action.role.BulkDeleteRolesRequest; +import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesRequest; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; @@ -291,6 +294,8 @@ public class LoggingAuditTrail implements AuditTrail, ClusterStateListener { PutUserAction.NAME, PutRoleAction.NAME, PutRoleMappingAction.NAME, + ActionTypes.BULK_PUT_ROLES.name(), + ActionTypes.BULK_DELETE_ROLES.name(), TransportSetEnabledAction.TYPE.name(), TransportChangePasswordAction.TYPE.name(), CreateApiKeyAction.NAME, @@ -731,6 +736,11 @@ public void accessGranted( } else if (msg instanceof PutRoleRequest) { assert PutRoleAction.NAME.equals(action); 
securityChangeLogEntryBuilder(requestId).withRequestBody((PutRoleRequest) msg).build(); + } else if (msg instanceof BulkPutRolesRequest bulkPutRolesRequest) { + assert ActionTypes.BULK_PUT_ROLES.name().equals(action); + for (RoleDescriptor roleDescriptor : bulkPutRolesRequest.getRoles()) { + securityChangeLogEntryBuilder(requestId).withRequestBody(roleDescriptor.getName(), roleDescriptor).build(); + } } else if (msg instanceof PutRoleMappingRequest) { assert PutRoleMappingAction.NAME.equals(action); securityChangeLogEntryBuilder(requestId).withRequestBody((PutRoleMappingRequest) msg).build(); @@ -755,6 +765,11 @@ public void accessGranted( } else if (msg instanceof DeleteRoleRequest) { assert DeleteRoleAction.NAME.equals(action); securityChangeLogEntryBuilder(requestId).withRequestBody((DeleteRoleRequest) msg).build(); + } else if (msg instanceof BulkDeleteRolesRequest bulkDeleteRolesRequest) { + assert ActionTypes.BULK_DELETE_ROLES.name().equals(action); + for (String roleName : bulkDeleteRolesRequest.getRoleNames()) { + securityChangeLogEntryBuilder(requestId).withDeleteRole(roleName).build(); + } } else if (msg instanceof DeleteRoleMappingRequest) { assert DeleteRoleMappingAction.NAME.equals(action); securityChangeLogEntryBuilder(requestId).withRequestBody((DeleteRoleMappingRequest) msg).build(); @@ -1160,15 +1175,19 @@ LogEntryBuilder withRequestBody(ChangePasswordRequest changePasswordRequest) thr } LogEntryBuilder withRequestBody(PutRoleRequest putRoleRequest) throws IOException { + return withRequestBody(putRoleRequest.name(), putRoleRequest.roleDescriptor()); + } + + LogEntryBuilder withRequestBody(String roleName, RoleDescriptor roleDescriptor) throws IOException { logEntry.with(EVENT_ACTION_FIELD_NAME, "put_role"); XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true); builder.startObject() .startObject("role") - .field("name", putRoleRequest.name()) + .field("name", roleName) // the "role_descriptor" nested structure, where the "name" is left out, is closer to the event structure // for creating API Keys .field("role_descriptor"); - withRoleDescriptor(builder, putRoleRequest.roleDescriptor()); + withRoleDescriptor(builder, roleDescriptor); builder.endObject() // role .endObject(); logEntry.with(PUT_CONFIG_FIELD_NAME, Strings.toString(builder)); @@ -1350,7 +1369,7 @@ private static void withRoleDescriptor(XContentBuilder builder, RoleDescriptor r withIndicesPrivileges(builder, indicesPrivileges); } builder.endArray(); - // the toXContent method of the {@code RoleDescriptor.ApplicationResourcePrivileges) does a good job + // the toXContent method of the {@code RoleDescriptor.ApplicationResourcePrivileges} does a good job builder.xContentList(RoleDescriptor.Fields.APPLICATIONS.getPreferredName(), roleDescriptor.getApplicationPrivileges()); builder.array(RoleDescriptor.Fields.RUN_AS.getPreferredName(), roleDescriptor.getRunAs()); if (roleDescriptor.getMetadata() != null && false == roleDescriptor.getMetadata().isEmpty()) { @@ -1401,15 +1420,7 @@ LogEntryBuilder withRequestBody(DeleteUserRequest deleteUserRequest) throws IOEx } LogEntryBuilder withRequestBody(DeleteRoleRequest deleteRoleRequest) throws IOException { - logEntry.with(EVENT_ACTION_FIELD_NAME, "delete_role"); - XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true); - builder.startObject() - .startObject("role") - .field("name", deleteRoleRequest.name()) - .endObject() // role - .endObject(); - logEntry.with(DELETE_CONFIG_FIELD_NAME, Strings.toString(builder)); - return this; + 
return withDeleteRole(deleteRoleRequest.name()); } LogEntryBuilder withRequestBody(DeleteRoleMappingRequest deleteRoleMappingRequest) throws IOException { @@ -1532,6 +1543,18 @@ LogEntryBuilder withRequestBody(SetProfileEnabledRequest setProfileEnabledReques return this; } + LogEntryBuilder withDeleteRole(String roleName) throws IOException { + logEntry.with(EVENT_ACTION_FIELD_NAME, "delete_role"); + XContentBuilder builder = JsonXContent.contentBuilder().humanReadable(true); + builder.startObject() + .startObject("role") + .field("name", roleName) + .endObject() // role + .endObject(); + logEntry.with(DELETE_CONFIG_FIELD_NAME, Strings.toString(builder)); + return this; + } + static void withGrant(XContentBuilder builder, Grant grant) throws IOException { builder.startObject("grant").field("type", grant.getType()); if (grant.getUsername() != null) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index a3292a6ab5f4e..17bad90415e7c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.apikey.ApiKeyTests; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyRequest; @@ -73,6 +74,8 @@ import org.elasticsearch.xpack.core.security.action.profile.SetProfileEnabledRequest; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; +import org.elasticsearch.xpack.core.security.action.role.BulkDeleteRolesRequest; +import org.elasticsearch.xpack.core.security.action.role.BulkPutRolesRequest; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; @@ -772,20 +775,19 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException auditTrail.accessGranted(requestId, authentication, PutRoleAction.NAME, putRoleRequest, authorizationInfo); output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); - String generatedPutRoleAuditEventString = output.get(1); - String expectedPutRoleAuditEventString = Strings.format(""" - "put":{"role":{"name":"%s","role_descriptor":%s}}\ - """, putRoleRequest.name(), auditedRolesMap.get(putRoleRequest.name())); - assertThat(generatedPutRoleAuditEventString, containsString(expectedPutRoleAuditEventString)); - generatedPutRoleAuditEventString = generatedPutRoleAuditEventString.replace(", " + expectedPutRoleAuditEventString, ""); - checkedFields = new HashMap<>(commonFields); - checkedFields.remove(LoggingAuditTrail.ORIGIN_ADDRESS_FIELD_NAME); - checkedFields.remove(LoggingAuditTrail.ORIGIN_TYPE_FIELD_NAME); - checkedFields.put("type", "audit"); - 
checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, "security_config_change"); - checkedFields.put(LoggingAuditTrail.EVENT_ACTION_FIELD_NAME, "put_role"); - checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); - assertMsg(generatedPutRoleAuditEventString, checkedFields); + assertPutRoleAuditLogLine(putRoleRequest.name(), output.get(1), auditedRolesMap, requestId); + // clear log + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + + BulkPutRolesRequest bulkPutRolesRequest = new BulkPutRolesRequest(allTestRoleDescriptors); + bulkPutRolesRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); + auditTrail.accessGranted(requestId, authentication, ActionTypes.BULK_PUT_ROLES.name(), bulkPutRolesRequest, authorizationInfo); + output = CapturingLogger.output(logger.getName(), Level.INFO); + assertThat(output.size(), is(allTestRoleDescriptors.size() + 1)); + + for (int i = 0; i < allTestRoleDescriptors.size(); i++) { + assertPutRoleAuditLogLine(allTestRoleDescriptors.get(i).getName(), output.get(i + 1), auditedRolesMap, requestId); + } // clear log CapturingLogger.output(logger.getName(), Level.INFO).clear(); @@ -795,25 +797,64 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException auditTrail.accessGranted(requestId, authentication, DeleteRoleAction.NAME, deleteRoleRequest, authorizationInfo); output = CapturingLogger.output(logger.getName(), Level.INFO); assertThat(output.size(), is(2)); - String generatedDeleteRoleAuditEventString = output.get(1); + assertDeleteRoleAuditLogLine(putRoleRequest.name(), output.get(1), requestId); + // clear log + CapturingLogger.output(logger.getName(), Level.INFO).clear(); + + BulkDeleteRolesRequest bulkDeleteRolesRequest = new BulkDeleteRolesRequest( + allTestRoleDescriptors.stream().map(RoleDescriptor::getName).toList() + ); + bulkDeleteRolesRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); + auditTrail.accessGranted( + requestId, + authentication, + ActionTypes.BULK_DELETE_ROLES.name(), + bulkDeleteRolesRequest, + authorizationInfo + ); + output = CapturingLogger.output(logger.getName(), Level.INFO); + assertThat(output.size(), is(allTestRoleDescriptors.size() + 1)); + for (int i = 0; i < allTestRoleDescriptors.size(); i++) { + assertDeleteRoleAuditLogLine(allTestRoleDescriptors.get(i).getName(), output.get(i + 1), requestId); + } + } + + private void assertPutRoleAuditLogLine(String roleName, String logLine, Map expectedLogByRoleName, String requestId) { + String expectedPutRoleAuditEventString = Strings.format(""" + "put":{"role":{"name":"%s","role_descriptor":%s}}\ + """, roleName, expectedLogByRoleName.get(roleName)); + + assertThat(logLine, containsString(expectedPutRoleAuditEventString)); + String reducedLogLine = logLine.replace(", " + expectedPutRoleAuditEventString, ""); + Map checkedFields = new HashMap<>(commonFields); + checkedFields.remove(LoggingAuditTrail.ORIGIN_ADDRESS_FIELD_NAME); + checkedFields.remove(LoggingAuditTrail.ORIGIN_TYPE_FIELD_NAME); + checkedFields.put("type", "audit"); + checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, "security_config_change"); + checkedFields.put(LoggingAuditTrail.EVENT_ACTION_FIELD_NAME, "put_role"); + checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); + assertMsg(reducedLogLine, checkedFields); + } + + private void assertDeleteRoleAuditLogLine(String roleName, String logLine, String requestId) { StringBuilder deleteRoleStringBuilder = new 
StringBuilder().append("\"delete\":{\"role\":{\"name\":");
-        if (deleteRoleRequest.name() == null) {
+        if (roleName == null) {
             deleteRoleStringBuilder.append("null");
         } else {
-            deleteRoleStringBuilder.append("\"").append(deleteRoleRequest.name()).append("\"");
+            deleteRoleStringBuilder.append("\"").append(roleName).append("\"");
         }
         deleteRoleStringBuilder.append("}}");
         String expectedDeleteRoleAuditEventString = deleteRoleStringBuilder.toString();
-        assertThat(generatedDeleteRoleAuditEventString, containsString(expectedDeleteRoleAuditEventString));
-        generatedDeleteRoleAuditEventString = generatedDeleteRoleAuditEventString.replace(", " + expectedDeleteRoleAuditEventString, "");
-        checkedFields = new HashMap<>(commonFields);
+        assertThat(logLine, containsString(expectedDeleteRoleAuditEventString));
+        String reducedLogLine = logLine.replace(", " + expectedDeleteRoleAuditEventString, "");
+        Map checkedFields = new HashMap<>(commonFields);
         checkedFields.remove(LoggingAuditTrail.ORIGIN_ADDRESS_FIELD_NAME);
         checkedFields.remove(LoggingAuditTrail.ORIGIN_TYPE_FIELD_NAME);
         checkedFields.put("type", "audit");
         checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, "security_config_change");
         checkedFields.put(LoggingAuditTrail.EVENT_ACTION_FIELD_NAME, "delete_role");
         checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId);
-        assertMsg(generatedDeleteRoleAuditEventString, checkedFields);
+        assertMsg(reducedLogLine, checkedFields);
     }
 
     public void testSecurityConfigChangeEventForCrossClusterApiKeys() throws IOException {
@@ -1975,6 +2016,11 @@ public void testSecurityConfigChangedEventSelection() {
         Tuple actionAndRequest = randomFrom(
             new Tuple<>(PutUserAction.NAME, new PutUserRequest()),
             new Tuple<>(PutRoleAction.NAME, new PutRoleRequest()),
+            new Tuple<>(
+                ActionTypes.BULK_PUT_ROLES.name(),
+                new BulkPutRolesRequest(List.of(new RoleDescriptor(randomAlphaOfLength(20), null, null, null)))
+            ),
+            new Tuple<>(ActionTypes.BULK_DELETE_ROLES.name(), new BulkDeleteRolesRequest(List.of(randomAlphaOfLength(20)))),
             new Tuple<>(PutRoleMappingAction.NAME, new PutRoleMappingRequest()),
             new Tuple<>(TransportSetEnabledAction.TYPE.name(), new SetEnabledRequest()),
             new Tuple<>(TransportChangePasswordAction.TYPE.name(), new ChangePasswordRequest()),

From 5d53c9a363885e5db2bcd18bd73dfaa94c70682e Mon Sep 17 00:00:00 2001
From: Ignacio Vera
Date: Fri, 5 Jul 2024 15:33:12 +0200
Subject: [PATCH 197/216] Add protection for OOM during aggregations partial
 reduction (#110520)

This commit adds a check of the parent circuit breaker every 1024 calls to the
buckets consumer during aggregations partial reduction.
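For illustration, here is a minimal, self-contained sketch of the sampling pattern this change introduces. The `Breaker` interface below is a hypothetical stand-in for Elasticsearch's `CircuitBreaker`, pared down to the one `addEstimateBytesAndMaybeBreak` method the new consumer actually calls. The idea is to count invocations and consult the breaker only when the low ten bits of the counter wrap to zero, i.e. on every 1024th call, so the hot bucket-counting path stays cheap while runaway memory use during partial reduction is still caught:

```java
import java.util.function.IntConsumer;

// Hypothetical stand-in for org.elasticsearch.common.breaker.CircuitBreaker,
// reduced to the single method this sketch needs.
interface Breaker {
    void addEstimateBytesAndMaybeBreak(long bytes, String label);
}

// Consults the breaker on every 1024th call instead of on every call.
final class SampledBreakerCheck implements IntConsumer {
    private final Breaker breaker;
    private int callCount = 0; // partial reduction is single-threaded, so no atomic needed

    SampledBreakerCheck(Breaker breaker) {
        this.breaker = breaker;
    }

    @Override
    public void accept(int bucketCount) {
        // (++callCount & 0x3FF) == 0 holds exactly when callCount is a multiple of 1024;
        // it is the bitmask form of callCount % 1024 == 0, without a division per bucket.
        if ((++callCount & 0x3FF) == 0) {
            // Adding 0 bytes still forces the parent breaker to re-check memory usage.
            breaker.addEstimateBytesAndMaybeBreak(0, "allocated_buckets");
        }
    }
}
```

Requesting an estimate of 0 bytes is deliberate: with the real-memory parent breaker, any call to `addEstimateBytesAndMaybeBreak` triggers a check of actual heap usage, which is what provides the OOM protection described above.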
--- .../aggregations/TermsReduceBenchmark.java | 2 +- docs/changelog/110520.yaml | 5 +++ .../elasticsearch/search/SearchService.java | 10 ++++-- .../AggregationReduceContext.java | 21 ++++++++--- .../MultiBucketConsumerService.java | 29 ++++++++++++++- .../search/QueryPhaseResultConsumerTests.java | 8 ++++- .../search/SearchPhaseControllerTests.java | 2 +- .../search/SearchServiceTests.java | 36 +++++++++++++++++++ .../aggregations/AggregatorTestCase.java | 6 ++-- .../test/InternalAggregationTestCase.java | 7 ++-- .../action/TransportRollupSearchAction.java | 3 +- 11 files changed, 113 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/110520.yaml diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java index 230e0c7e546c2..691874c775302 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/aggregations/TermsReduceBenchmark.java @@ -71,7 +71,7 @@ public class TermsReduceBenchmark { private final SearchPhaseController controller = new SearchPhaseController((task, req) -> new AggregationReduceContext.Builder() { @Override public AggregationReduceContext forPartialReduction() { - return new AggregationReduceContext.ForPartial(null, null, task, builder); + return new AggregationReduceContext.ForPartial(null, null, task, builder, b -> {}); } @Override diff --git a/docs/changelog/110520.yaml b/docs/changelog/110520.yaml new file mode 100644 index 0000000000000..fba4b84e2279e --- /dev/null +++ b/docs/changelog/110520.yaml @@ -0,0 +1,5 @@ +pr: 110520 +summary: Add protection for OOM during aggregations partial reduction +area: Aggregations +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 0c9d5ee51a9f0..979a59b4d0b94 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -1840,7 +1840,13 @@ public AggregationReduceContext.Builder aggReduceContextBuilder(Supplier isCanceled, - AggregatorFactories.Builder builders + AggregatorFactories.Builder builders, + IntConsumer multiBucketConsumer ) { super(bigArrays, scriptService, isCanceled, builders); + this.multiBucketConsumer = multiBucketConsumer; } - public ForPartial(BigArrays bigArrays, ScriptService scriptService, Supplier isCanceled, AggregationBuilder builder) { + public ForPartial( + BigArrays bigArrays, + ScriptService scriptService, + Supplier isCanceled, + AggregationBuilder builder, + IntConsumer multiBucketConsumer + ) { super(bigArrays, scriptService, isCanceled, builder); + this.multiBucketConsumer = multiBucketConsumer; } @Override @@ -158,7 +169,9 @@ public boolean isFinalReduce() { } @Override - protected void consumeBucketCountAndMaybeBreak(int size) {} + protected void consumeBucketCountAndMaybeBreak(int size) { + multiBucketConsumer.accept(size); + } @Override public PipelineTree pipelineTreeRoot() { @@ -167,7 +180,7 @@ public PipelineTree pipelineTreeRoot() { @Override protected AggregationReduceContext forSubAgg(AggregationBuilder sub) { - return new ForPartial(bigArrays(), scriptService(), isCanceled(), sub); + return new ForPartial(bigArrays(), scriptService(), isCanceled(), sub, multiBucketConsumer); } } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java index c876f971a7c65..a6f634ec371b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java @@ -134,10 +134,37 @@ public int getCount() { } } - public MultiBucketConsumer create() { + /** + * Similar to {@link MultiBucketConsumer} but it only checks the parent circuit breaker every 1024 calls. + * It provides protection for OOM during partial reductions. + */ + private static class MultiBucketConsumerPartialReduction implements IntConsumer { + private final CircuitBreaker breaker; + + // aggregations execute in a single thread so no atomic here + private int callCount = 0; + + private MultiBucketConsumerPartialReduction(CircuitBreaker breaker) { + this.breaker = breaker; + } + + @Override + public void accept(int value) { + // check parent circuit breaker every 1024 calls + if ((++callCount & 0x3FF) == 0) { + breaker.addEstimateBytesAndMaybeBreak(0, "allocated_buckets"); + } + } + } + + public IntConsumer createForFinal() { return new MultiBucketConsumer(maxBucket, breaker); } + public IntConsumer createForPartial() { + return new MultiBucketConsumerPartialReduction(breaker); + } + public int getLimit() { return maxBucket; } diff --git a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java index db32213ff97b7..ab7d9f180eae4 100644 --- a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java @@ -53,7 +53,13 @@ public void setup() { searchPhaseController = new SearchPhaseController((t, s) -> new AggregationReduceContext.Builder() { @Override public AggregationReduceContext forPartialReduction() { - return new AggregationReduceContext.ForPartial(BigArrays.NON_RECYCLING_INSTANCE, null, t, mock(AggregationBuilder.class)); + return new AggregationReduceContext.ForPartial( + BigArrays.NON_RECYCLING_INSTANCE, + null, + t, + mock(AggregationBuilder.class), + b -> {} + ); } public AggregationReduceContext forFinalReduction() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 43bca4bae2f3f..118a7055cd782 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -122,7 +122,7 @@ public void setup() { @Override public AggregationReduceContext forPartialReduction() { reductions.add(false); - return new AggregationReduceContext.ForPartial(BigArrays.NON_RECYCLING_INSTANCE, null, t, agg); + return new AggregationReduceContext.ForPartial(BigArrays.NON_RECYCLING_INSTANCE, null, t, agg, b -> {}); } public AggregationReduceContext forFinalReduction() { diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 4609c7327c798..7ddcc88facb2a 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -47,7 +47,10 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -151,6 +154,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Function; +import java.util.function.IntConsumer; import java.util.function.Supplier; import static java.util.Collections.emptyList; @@ -1985,6 +1989,38 @@ public void testCreateReduceContext() { } } + public void testMultiBucketConsumerServiceCB() { + MultiBucketConsumerService service = new MultiBucketConsumerService( + getInstanceFromNode(ClusterService.class), + Settings.EMPTY, + new NoopCircuitBreaker("test") { + + @Override + public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + throw new CircuitBreakingException("tripped", getDurability()); + } + } + ); + // for partial + { + IntConsumer consumer = service.createForPartial(); + for (int i = 0; i < 1023; i++) { + consumer.accept(0); + } + CircuitBreakingException ex = expectThrows(CircuitBreakingException.class, () -> consumer.accept(0)); + assertThat(ex.getMessage(), equalTo("tripped")); + } + // for final + { + IntConsumer consumer = service.createForFinal(); + for (int i = 0; i < 1023; i++) { + consumer.accept(0); + } + CircuitBreakingException ex = expectThrows(CircuitBreakingException.class, () -> consumer.accept(0)); + assertThat(ex.getMessage(), equalTo("tripped")); + } + } + public void testCreateSearchContext() throws IOException { String index = randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT); IndexService indexService = createIndex(index); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index d39a8df80c26d..b19174b8e5c8c 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -644,7 +644,8 @@ private A searchAndReduce( bigArraysForReduction, getMockScriptService(), () -> false, - builder + builder, + b -> {} ); AggregatorCollectorManager aggregatorCollectorManager = new AggregatorCollectorManager( aggregatorSupplier, @@ -669,7 +670,8 @@ private A searchAndReduce( bigArraysForReduction, getMockScriptService(), () -> false, - builder + builder, + b -> {} ); internalAggs = new ArrayList<>(internalAggs.subList(r, toReduceSize)); internalAggs.add(InternalAggregations.topLevelReduce(toReduce, reduceContext)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 12c5085cbcd73..4aed7ff4565cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java 
@@ -77,7 +77,7 @@ public static AggregationReduceContext.Builder emptyReduceContextBuilder(Aggrega return new AggregationReduceContext.Builder() { @Override public AggregationReduceContext forPartialReduction() { - return new AggregationReduceContext.ForPartial(BigArrays.NON_RECYCLING_INSTANCE, null, () -> false, aggs); + return new AggregationReduceContext.ForPartial(BigArrays.NON_RECYCLING_INSTANCE, null, () -> false, aggs, b -> {}); } @Override @@ -95,7 +95,7 @@ public static AggregationReduceContext.Builder mockReduceContext(AggregationBuil return new AggregationReduceContext.Builder() { @Override public AggregationReduceContext forPartialReduction() { - return new AggregationReduceContext.ForPartial(BigArrays.NON_RECYCLING_INSTANCE, null, () -> false, agg); + return new AggregationReduceContext.ForPartial(BigArrays.NON_RECYCLING_INSTANCE, null, () -> false, agg, b -> {}); } @Override @@ -244,7 +244,8 @@ public void testReduceRandom() throws IOException { bigArrays, mockScriptService, () -> false, - inputs.builder() + inputs.builder(), + b -> {} ); @SuppressWarnings("unchecked") T reduced = (T) reduce(toPartialReduce, context); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 6bd29ddb52301..4108b0f6d3c83 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -128,7 +128,8 @@ public AggregationReduceContext forPartialReduction() { bigArrays, scriptService, ((CancellableTask) task)::isCancelled, - request.source().aggregations() + request.source().aggregations(), + b -> {} ); } From 1aea04943c6c1fb4db97e079c5d08d8569658fe1 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Fri, 5 Jul 2024 09:54:51 -0400 Subject: [PATCH 198/216] [ML] Avoid ModelAssignment deadlock (#109684) The model loading scheduled thread iterates through the model queue and deploys each model. Rather than block and wait on each deployment, the thread will attach a listener that will either iterate to the next model (if one is in the queue) or reschedule the thread. This change should not impact: 1. the iterative nature of the model deployment process - each model is still deployed one at a time, and no additional threads are consumed per model. 2. the 1s delay between model deployment tries - if a deployment fails but can be retried, the retry is added to the next batch of models that are consumed after the 1s scheduled delay. 
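As a rough sketch of the reschedule-via-listener pattern described above (all names here are hypothetical, and the asynchronous deployment step is reduced to a callback interface), one queued model is handled per iteration, and the completion listener decides whether to drain again immediately or fall back to the fixed delay:

```java
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

// Hypothetical skeleton: Deployer stands in for the async deployment call, and
// Consumer<Boolean> for the listener that drives rescheduling.
class ModelQueueDrainer {
    interface Deployer {
        // retryListener receives true if the model should be re-queued and retried
        void deploy(String modelId, Consumer<Boolean> retryListener);
    }

    private final Queue<String> queue = new ConcurrentLinkedQueue<>();
    private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    private final Deployer deployer;

    ModelQueueDrainer(Deployer deployer) {
        this.deployer = deployer;
    }

    void schedule(boolean runImmediately) {
        Runnable drainOne = () -> loadQueuedModels(this::schedule);
        if (runImmediately) {
            executor.execute(drainOne);                       // more work queued: loop right away
        } else {
            executor.schedule(drainOne, 1, TimeUnit.SECONDS); // the 1s delay between tries
        }
    }

    private void loadQueuedModels(Consumer<Boolean> reschedule) {
        String model = queue.poll();
        if (model == null) {
            reschedule.accept(false); // nothing queued: poll again after the delay
            return;
        }
        // The listener, not a blocking get(), drives the next iteration, so the
        // worker thread is never parked waiting for a deployment to finish.
        deployer.deploy(model, retry -> {
            if (retry) {
                queue.offer(model);
                // if the retried model is the only thing queued, wait out the delay
                // instead of immediately retrying the same model
                reschedule.accept(queue.peek() != model);
            } else {
                reschedule.accept(queue.isEmpty() == false);
            }
        });
    }
}
```

Models are still deployed one at a time and no extra thread is consumed per model; the only difference from a blocking loop is that the continuation is a callback rather than a wait on a future.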
Co-authored-by: Elastic Machine Co-authored-by: David Kyle --- docs/changelog/109684.yaml | 5 + .../TrainedModelAssignmentNodeService.java | 121 ++++++++++-------- ...rainedModelAssignmentNodeServiceTests.java | 113 ++++++++++------ 3 files changed, 150 insertions(+), 89 deletions(-) create mode 100644 docs/changelog/109684.yaml diff --git a/docs/changelog/109684.yaml b/docs/changelog/109684.yaml new file mode 100644 index 0000000000000..156f568290cf5 --- /dev/null +++ b/docs/changelog/109684.yaml @@ -0,0 +1,5 @@ +pr: 109684 +summary: Avoid `ModelAssignment` deadlock +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java index 7052e6f147b36..1ac177be3d594 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java @@ -12,8 +12,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.UnsafePlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -53,7 +52,6 @@ import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import org.elasticsearch.xpack.ml.task.AbstractJobPersistentTasksExecutor; -import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.Deque; @@ -154,16 +152,29 @@ public void beforeStop() { this.expressionResolver = expressionResolver; } - public void start() { + void start() { stopped = false; - scheduledFuture = threadPool.scheduleWithFixedDelay( - this::loadQueuedModels, - MODEL_LOADING_CHECK_INTERVAL, - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME) - ); + schedule(false); } - public void stop() { + private void schedule(boolean runImmediately) { + if (stopped) { + // do not schedule when stopped + return; + } + + var rescheduleListener = ActionListener.wrap(this::schedule, e -> this.schedule(false)); + Runnable loadQueuedModels = () -> loadQueuedModels(rescheduleListener); + var executor = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); + + if (runImmediately) { + executor.execute(loadQueuedModels); + } else { + scheduledFuture = threadPool.schedule(loadQueuedModels, MODEL_LOADING_CHECK_INTERVAL, executor); + } + } + + void stop() { stopped = true; ThreadPool.Cancellable cancellable = this.scheduledFuture; if (cancellable != null) { @@ -171,9 +182,8 @@ public void stop() { } } - void loadQueuedModels() { - TrainedModelDeploymentTask loadingTask; - if (loadingModels.isEmpty()) { + void loadQueuedModels(ActionListener rescheduleImmediately) { + if (stopped) { return; } if (latestState != null) { @@ -188,39 +198,49 @@ void loadQueuedModels() { ); if (unassignedIndices.size() > 0) { logger.trace("not loading models as indices {} primary shards are unassigned", unassignedIndices); + rescheduleImmediately.onResponse(false); return; } } - 
logger.trace("attempting to load all currently queued models"); - // NOTE: As soon as this method exits, the timer for the scheduler starts ticking - Deque loadingToRetry = new ArrayDeque<>(); - while ((loadingTask = loadingModels.poll()) != null) { - final String deploymentId = loadingTask.getDeploymentId(); - if (loadingTask.isStopped()) { - if (logger.isTraceEnabled()) { - String reason = loadingTask.stoppedReason().orElse("_unknown_"); - logger.trace("[{}] attempted to load stopped task with reason [{}]", deploymentId, reason); - } - continue; + + var loadingTask = loadingModels.poll(); + if (loadingTask == null) { + rescheduleImmediately.onResponse(false); + return; + } + + loadModel(loadingTask, ActionListener.wrap(retry -> { + if (retry != null && retry) { + loadingModels.offer(loadingTask); + // don't reschedule immediately if the next task is the one we just queued, instead wait a bit to retry + rescheduleImmediately.onResponse(loadingModels.peek() != loadingTask); + } else { + rescheduleImmediately.onResponse(loadingModels.isEmpty() == false); } - if (stopped) { - return; + }, e -> rescheduleImmediately.onResponse(loadingModels.isEmpty() == false))); + } + + void loadModel(TrainedModelDeploymentTask loadingTask, ActionListener retryListener) { + if (loadingTask.isStopped()) { + if (logger.isTraceEnabled()) { + logger.trace( + "[{}] attempted to load stopped task with reason [{}]", + loadingTask.getDeploymentId(), + loadingTask.stoppedReason().orElse("_unknown_") + ); } - final PlainActionFuture listener = new UnsafePlainActionFuture<>( - MachineLearning.UTILITY_THREAD_POOL_NAME - ); - try { - deploymentManager.startDeployment(loadingTask, listener); - // This needs to be synchronous here in the utility thread to keep queueing order - TrainedModelDeploymentTask deployedTask = listener.actionGet(); - // kicks off asynchronous cluster state update - handleLoadSuccess(deployedTask); - } catch (Exception ex) { + retryListener.onResponse(false); + return; + } + SubscribableListener.newForked(l -> deploymentManager.startDeployment(loadingTask, l)) + .andThen(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), threadPool.getThreadContext(), this::handleLoadSuccess) + .addListener(retryListener.delegateResponse((retryL, ex) -> { + var deploymentId = loadingTask.getDeploymentId(); logger.warn(() -> "[" + deploymentId + "] Start deployment failed", ex); if (ExceptionsHelper.unwrapCause(ex) instanceof ResourceNotFoundException) { - String modelId = loadingTask.getParams().getModelId(); + var modelId = loadingTask.getParams().getModelId(); logger.debug(() -> "[" + deploymentId + "] Start deployment failed as model [" + modelId + "] was not found", ex); - handleLoadFailure(loadingTask, ExceptionsHelper.missingTrainedModel(modelId, ex)); + handleLoadFailure(loadingTask, ExceptionsHelper.missingTrainedModel(modelId, ex), retryL); } else if (ExceptionsHelper.unwrapCause(ex) instanceof SearchPhaseExecutionException) { /* * This case will not catch the ElasticsearchException generated from the ChunkedTrainedModelRestorer in a scenario @@ -232,13 +252,11 @@ void loadQueuedModels() { // A search phase execution failure should be retried, push task back to the queue // This will cause the entire model to be reloaded (all the chunks) - loadingToRetry.add(loadingTask); + retryL.onResponse(true); } else { - handleLoadFailure(loadingTask, ex); + handleLoadFailure(loadingTask, ex, retryL); } - } - } - loadingModels.addAll(loadingToRetry); + }), 
threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), threadPool.getThreadContext()); } public void gracefullyStopDeploymentAndNotify( @@ -680,14 +698,14 @@ void prepareModelToLoad(StartTrainedModelDeploymentAction.TaskParams taskParams) ); // threadsafe check to verify we are not loading/loaded the model if (deploymentIdToTask.putIfAbsent(taskParams.getDeploymentId(), task) == null) { - loadingModels.add(task); + loadingModels.offer(task); } else { // If there is already a task for the deployment, unregister the new task taskManager.unregister(task); } } - private void handleLoadSuccess(TrainedModelDeploymentTask task) { + private void handleLoadSuccess(ActionListener retryListener, TrainedModelDeploymentTask task) { logger.debug( () -> "[" + task.getParams().getDeploymentId() @@ -704,13 +722,16 @@ private void handleLoadSuccess(TrainedModelDeploymentTask task) { task.stoppedReason().orElse("_unknown_") ) ); + retryListener.onResponse(false); return; } updateStoredState( task.getDeploymentId(), RoutingInfoUpdate.updateStateAndReason(new RoutingStateAndReason(RoutingState.STARTED, "")), - ActionListener.wrap(r -> logger.debug(() -> "[" + task.getDeploymentId() + "] model loaded and accepting routes"), e -> { + ActionListener.runAfter(ActionListener.wrap(r -> { + logger.debug(() -> "[" + task.getDeploymentId() + "] model loaded and accepting routes"); + }, e -> { // This means that either the assignment has been deleted, or this node's particular route has been removed if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { logger.debug( @@ -732,7 +753,7 @@ private void handleLoadSuccess(TrainedModelDeploymentTask task) { e ); } - }) + }), () -> retryListener.onResponse(false)) ); } @@ -752,7 +773,7 @@ private void updateStoredState(String deploymentId, RoutingInfoUpdate update, Ac ); } - private void handleLoadFailure(TrainedModelDeploymentTask task, Exception ex) { + private void handleLoadFailure(TrainedModelDeploymentTask task, Exception ex, ActionListener retryListener) { logger.error(() -> "[" + task.getDeploymentId() + "] model [" + task.getParams().getModelId() + "] failed to load", ex); if (task.isStopped()) { logger.debug( @@ -769,14 +790,14 @@ private void handleLoadFailure(TrainedModelDeploymentTask task, Exception ex) { Runnable stopTask = () -> stopDeploymentAsync( task, "model failed to load; reason [" + ex.getMessage() + "]", - ActionListener.noop() + ActionListener.running(() -> retryListener.onResponse(false)) ); updateStoredState( task.getDeploymentId(), RoutingInfoUpdate.updateStateAndReason( new RoutingStateAndReason(RoutingState.FAILED, ExceptionsHelper.unwrapCause(ex).getMessage()) ), - ActionListener.wrap(r -> stopTask.run(), e -> stopTask.run()) + ActionListener.running(stopTask) ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java index 2444134ce2920..f8f699b86966d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; +import 
org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -49,10 +50,13 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiConsumer; import static org.elasticsearch.xpack.ml.MachineLearning.UTILITY_THREAD_POOL_NAME; import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentClusterServiceTests.shutdownMetadata; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -91,19 +95,13 @@ public void setupObjects() { taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); deploymentManager = mock(DeploymentManager.class); doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(invocationOnMock.getArguments()[0]); + ActionListener listener = invocationOnMock.getArgument(1); + listener.onResponse(invocationOnMock.getArgument(0)); return null; }).when(deploymentManager).startDeployment(any(), any()); doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - listener.onResponse(null); - return null; - }).when(deploymentManager).stopAfterCompletingPendingWork(any()); - - doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = invocationOnMock.getArgument(1); listener.onResponse(AcknowledgedResponse.TRUE); return null; }).when(trainedModelAssignmentService).updateModelAssignmentState(any(), any()); @@ -114,15 +112,54 @@ public void shutdown() throws InterruptedException { terminate(threadPool); } - public void testLoadQueuedModels_GivenNoQueuedModels() { - TrainedModelAssignmentNodeService trainedModelAssignmentNodeService = createService(); - + public void testLoadQueuedModels_GivenNoQueuedModels() throws InterruptedException { // When there are no queued models - trainedModelAssignmentNodeService.loadQueuedModels(); + loadQueuedModels(createService()); verify(deploymentManager, never()).startDeployment(any(), any()); } - public void testLoadQueuedModels() { + private void loadQueuedModels(TrainedModelAssignmentNodeService trainedModelAssignmentNodeService) throws InterruptedException { + loadQueuedModels(trainedModelAssignmentNodeService, false); + } + + private void loadQueuedModels(TrainedModelAssignmentNodeService trainedModelAssignmentNodeService, boolean expectedRunImmediately) { + trainedModelAssignmentNodeService.loadQueuedModels(ActionListener.wrap(actualRunImmediately -> { + assertThat( + "We should rerun immediately if there are still model loading tasks to process.", + actualRunImmediately, + equalTo(expectedRunImmediately) + ); + }, e -> fail("We should never call the onFailure method of this listener."))); + } + + private void loadQueuedModels(TrainedModelAssignmentNodeService trainedModelAssignmentNodeService, int times) + throws InterruptedException { + var modelQueueSize = new AtomicInteger(times); + BiConsumer, Boolean> verifyRerunningImmediately = (listener, result) -> { + var runImmediately = modelQueueSize.decrementAndGet() > 0; + assertThat( + "We 
should rerun immediately if there are still model loading tasks to process. Models remaining: " + modelQueueSize.get(), + result, + is(runImmediately) + ); + listener.onResponse(null); + }; + + var chain = SubscribableListener.newForked( + l -> trainedModelAssignmentNodeService.loadQueuedModels(l.delegateFailure(verifyRerunningImmediately)) + ); + for (int i = 1; i < times; i++) { + chain = chain.andThen( + (l, r) -> trainedModelAssignmentNodeService.loadQueuedModels(l.delegateFailure(verifyRerunningImmediately)) + ); + } + + var latch = new CountDownLatch(1); + chain.addListener(ActionListener.running(latch::countDown)); + assertTrue("Timed out waiting for loadQueuedModels to finish.", latch.await(10, TimeUnit.SECONDS)); + } + + public void testLoadQueuedModels() throws InterruptedException { TrainedModelAssignmentNodeService trainedModelAssignmentNodeService = createService(); String modelToLoad = "loading-model"; @@ -136,7 +173,8 @@ public void testLoadQueuedModels() { trainedModelAssignmentNodeService.prepareModelToLoad(newParams(deploymentId, modelToLoad)); trainedModelAssignmentNodeService.prepareModelToLoad(newParams(anotherDeployment, anotherModel)); - trainedModelAssignmentNodeService.loadQueuedModels(); + loadQueuedModels(trainedModelAssignmentNodeService, true); + loadQueuedModels(trainedModelAssignmentNodeService, false); ArgumentCaptor taskCapture = ArgumentCaptor.forClass(TrainedModelDeploymentTask.class); ArgumentCaptor requestCapture = ArgumentCaptor.forClass( @@ -157,11 +195,11 @@ public void testLoadQueuedModels() { // Since models are loaded, there shouldn't be any more loadings to occur trainedModelAssignmentNodeService.prepareModelToLoad(newParams(anotherDeployment, anotherModel)); - trainedModelAssignmentNodeService.loadQueuedModels(); + loadQueuedModels(trainedModelAssignmentNodeService); verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService); } - public void testLoadQueuedModelsWhenFailureIsRetried() { + public void testLoadQueuedModelsWhenFailureIsRetried() throws InterruptedException { String modelToLoad = "loading-model"; String failedModelToLoad = "failed-search-loading-model"; String deploymentId = "foo"; @@ -174,9 +212,9 @@ public void testLoadQueuedModelsWhenFailureIsRetried() { trainedModelAssignmentNodeService.prepareModelToLoad(newParams(deploymentId, modelToLoad)); trainedModelAssignmentNodeService.prepareModelToLoad(newParams(failedDeploymentId, failedModelToLoad)); - trainedModelAssignmentNodeService.loadQueuedModels(); - - trainedModelAssignmentNodeService.loadQueuedModels(); + loadQueuedModels(trainedModelAssignmentNodeService, true); + loadQueuedModels(trainedModelAssignmentNodeService, false); + loadQueuedModels(trainedModelAssignmentNodeService, false); ArgumentCaptor startTaskCapture = ArgumentCaptor.forClass(TrainedModelDeploymentTask.class); ArgumentCaptor requestCapture = ArgumentCaptor.forClass( @@ -209,7 +247,9 @@ public void testLoadQueuedModelsWhenStopped() { trainedModelAssignmentNodeService.prepareModelToLoad(newParams(modelToLoad, modelToLoad)); trainedModelAssignmentNodeService.stop(); - trainedModelAssignmentNodeService.loadQueuedModels(); + trainedModelAssignmentNodeService.loadQueuedModels( + ActionListener.running(() -> fail("When stopped, then loadQueuedModels should never run.")) + ); verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService); } @@ -231,7 +271,8 @@ public void testLoadQueuedModelsWhenTaskIsStopped() throws Exception { 
trainedModelAssignmentNodeService.prepareModelToLoad(newParams(loadingDeploymentId, modelToLoad)); trainedModelAssignmentNodeService.prepareModelToLoad(newParams(stoppedLoadingDeploymentId, stoppedModelToLoad)); trainedModelAssignmentNodeService.getTask(stoppedLoadingDeploymentId).stop("testing", false, ActionListener.noop()); - trainedModelAssignmentNodeService.loadQueuedModels(); + loadQueuedModels(trainedModelAssignmentNodeService, true); + loadQueuedModels(trainedModelAssignmentNodeService, false); assertBusy(() -> { ArgumentCaptor stoppedTaskCapture = ArgumentCaptor.forClass(TrainedModelDeploymentTask.class); @@ -283,15 +324,8 @@ public void testLoadQueuedModelsWhenOneFails() throws InterruptedException { trainedModelAssignmentNodeService.prepareModelToLoad(newParams(loadingDeploymentId, modelToLoad)); trainedModelAssignmentNodeService.prepareModelToLoad(newParams(failedLoadingDeploymentId, failedModelToLoad)); - CountDownLatch latch = new CountDownLatch(1); - doAnswer(invocationOnMock -> { - latch.countDown(); - return null; - }).when(deploymentManager).stopDeployment(any()); - - trainedModelAssignmentNodeService.loadQueuedModels(); - - latch.await(5, TimeUnit.SECONDS); + loadQueuedModels(trainedModelAssignmentNodeService, true); + loadQueuedModels(trainedModelAssignmentNodeService, false); ArgumentCaptor startTaskCapture = ArgumentCaptor.forClass(TrainedModelDeploymentTask.class); ArgumentCaptor requestCapture = ArgumentCaptor.forClass( @@ -318,7 +352,7 @@ public void testLoadQueuedModelsWhenOneFails() throws InterruptedException { verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService); } - public void testClusterChangedWithResetMode() { + public void testClusterChangedWithResetMode() throws InterruptedException { final TrainedModelAssignmentNodeService trainedModelAssignmentNodeService = createService(); final DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId(NODE_ID).add(DiscoveryNodeUtils.create(NODE_ID, NODE_ID)).build(); String modelOne = "model-1"; @@ -362,7 +396,7 @@ public void testClusterChangedWithResetMode() { ); trainedModelAssignmentNodeService.clusterChanged(event); - trainedModelAssignmentNodeService.loadQueuedModels(); + loadQueuedModels(trainedModelAssignmentNodeService); verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService); } @@ -480,7 +514,6 @@ public void testClusterChanged_WhenAssigmentIsRoutedToShuttingDownNodeButAlready String modelOne = "model-1"; String deploymentOne = "deployment-1"; - ArgumentCaptor stopParamsCapture = ArgumentCaptor.forClass(TrainedModelDeploymentTask.class); var taskParams = newParams(deploymentOne, modelOne); ClusterChangedEvent event = new ClusterChangedEvent( @@ -558,7 +591,7 @@ public void testClusterChanged_WhenAssigmentIsRoutedToShuttingDownNodeWithStarti verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService); } - public void testClusterChanged_WhenAssigmentIsStopping_DoesNotAddModelToBeLoaded() { + public void testClusterChanged_WhenAssigmentIsStopping_DoesNotAddModelToBeLoaded() throws InterruptedException { final TrainedModelAssignmentNodeService trainedModelAssignmentNodeService = createService(); final DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId(NODE_ID).add(DiscoveryNodeUtils.create(NODE_ID, NODE_ID)).build(); String modelOne = "model-1"; @@ -592,7 +625,7 @@ public void testClusterChanged_WhenAssigmentIsStopping_DoesNotAddModelToBeLoaded // trainedModelAssignmentNodeService.prepareModelToLoad(taskParams); 
        trainedModelAssignmentNodeService.clusterChanged(event);
-        trainedModelAssignmentNodeService.loadQueuedModels();
+        loadQueuedModels(trainedModelAssignmentNodeService);

         verify(deploymentManager, never()).startDeployment(any(), any());
         verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService);
@@ -706,7 +739,8 @@ public void testClusterChanged() throws Exception {
         );
         trainedModelAssignmentNodeService.clusterChanged(event);
-        trainedModelAssignmentNodeService.loadQueuedModels();
+        loadQueuedModels(trainedModelAssignmentNodeService, true);
+        loadQueuedModels(trainedModelAssignmentNodeService, false);

         assertBusy(() -> {
             ArgumentCaptor stoppedTaskCapture = ArgumentCaptor.forClass(TrainedModelDeploymentTask.class);
@@ -749,7 +783,7 @@ public void testClusterChanged() throws Exception {
         );
         trainedModelAssignmentNodeService.clusterChanged(event);
-        trainedModelAssignmentNodeService.loadQueuedModels();
+        loadQueuedModels(trainedModelAssignmentNodeService);

         verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService);
     }
@@ -764,7 +798,8 @@ public void testClusterChanged_GivenAllStartedAssignments_AndNonMatchingTargetAl
         givenAssignmentsInClusterStateForModels(List.of(deploymentOne, deploymentTwo), List.of(modelOne, modelTwo));
         trainedModelAssignmentNodeService.prepareModelToLoad(newParams(deploymentOne, modelOne));
         trainedModelAssignmentNodeService.prepareModelToLoad(newParams(deploymentTwo, modelTwo));
-        trainedModelAssignmentNodeService.loadQueuedModels();
+        loadQueuedModels(trainedModelAssignmentNodeService, true);
+        loadQueuedModels(trainedModelAssignmentNodeService, false);

         ClusterChangedEvent event = new ClusterChangedEvent(
             "shouldUpdateAllocations",

From 5c8c76e6b18bbb5e3e1b1e8978e296f3cacdaa24 Mon Sep 17 00:00:00 2001
From: Mayya Sharipova
Date: Fri, 5 Jul 2024 10:06:32 -0400
Subject: [PATCH 199/216] Fix bit vector tests (#110521)

Bit vector tests were failing in cases where an index had more than one
shard. For error cases where we expected the whole request to fail,
shards with no documents returned success and the whole request
unexpectedly returned 200. Ensuring that the index contains only one
shard fixes these failures.

Closes #110290, #110291
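To see why extra shards masked the error, consider the shape of these tests (a hedged sketch in the suite's YAML format, not one of the actual cases from 146_dense_vector_bit_basic.yml; the vector values are arbitrary): the script error is raised only by the shard that holds a document, the empty shards respond successfully, the failure is downgraded to a partial shard failure, and an assertion like the `catch: bad_request` below fails because the overall response is 200. With a single shard, the only shard fails and so does the whole request.

```
  - do:
      catch: bad_request
      headers:
        Content-Type: application/json
      search:
        index: test-index
        body:
          query:
            script_score:
              query: { match_all: {} }
              script:
                source: "cosineSimilarity(params.query_vector, 'vector')"
                params:
                  query_vector: [8, 5, -15, 1, -7]
```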
---
 .../test/painless/146_dense_vector_bit_basic.yml |  6 ++----
 muted-tests.yml                                  |  6 ------
 2 files changed, 2 insertions(+), 10 deletions(-)

diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml
index 3eb686bda2174..4c195a0e32623 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/146_dense_vector_bit_basic.yml
@@ -8,6 +8,8 @@ setup:
       indices.create:
         index: test-index
         body:
+          settings:
+            number_of_shards: 1
           mappings:
             properties:
               vector:
@@ -107,7 +109,6 @@ setup:
       headers:
         Content-Type: application/json
       search:
-        rest_total_hits_as_int: true
         body:
           query:
             script_score:
@@ -138,7 +139,6 @@ setup:
       headers:
         Content-Type: application/json
       search:
-        rest_total_hits_as_int: true
         body:
           query:
             script_score:
@@ -152,7 +152,6 @@ setup:
       headers:
         Content-Type: application/json
       search:
-        rest_total_hits_as_int: true
         body:
           query:
             script_score:
@@ -167,7 +166,6 @@ setup:
       headers:
         Content-Type: application/json
       search:
-        rest_total_hits_as_int: true
         body:
           query:
             script_score:

diff --git a/muted-tests.yml b/muted-tests.yml
index 91f38f3a5ba46..71e7d050c0e19 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -76,12 +76,6 @@ tests:
 - class: org.elasticsearch.compute.lucene.ValueSourceReaderTypeConversionTests
   method: testLoadAll
   issue: https://github.com/elastic/elasticsearch/issues/110244
-- class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT
-  method: test {yaml=painless/146_dense_vector_bit_basic/Cosine Similarity is not supported}
-  issue: https://github.com/elastic/elasticsearch/issues/110290
-- class: org.elasticsearch.painless.LangPainlessClientYamlTestSuiteIT
-  method: test {yaml=painless/146_dense_vector_bit_basic/Dot Product is not supported}
-  issue: https://github.com/elastic/elasticsearch/issues/110291
 - class: org.elasticsearch.action.search.SearchProgressActionListenerIT
   method: testSearchProgressWithQuery
   issue: https://github.com/elastic/elasticsearch/issues/109867

From df24e4f0288f1f0bc72e1e58c1fc7a43e9ccee2e Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Fri, 5 Jul 2024 10:22:24 -0400
Subject: [PATCH 200/216] ESQL: Plumb a way to run phased plans (#110445)

INLINESTATS is going to run two ESQL commands - one to get the STATS and
one to join the stats results to the output. This plumbs a way for
`EsqlSession#execute` to run multiple dips into the compute engine using
a `BiConsumer<PhysicalPlan, ActionListener<Result>> runPhase`. For now,
we just plug that right into the output to keep things working as they
are now. But soon, so soon, we'll plug in a second phase.
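To make the new control flow concrete, here is a minimal, self-contained sketch of the two-phase idea (the `ActionListener`, `PhysicalPlan`, and `Result` types below are simplified stand-ins for the Elasticsearch classes, and the phase names are made up for illustration): the session hands each physical plan to the `runPhase` hook, and the listener for phase one decides whether to kick off a second phase before the final response is built.

```
import java.util.function.BiConsumer;

// Simplified stand-ins, not the real Elasticsearch types.
interface ActionListener<T> {
    void onResponse(T result);

    void onFailure(Exception e);
}

record PhysicalPlan(String description) {}

record Result(String rows) {}

class PhasedPlanSketch {
    public static void main(String[] args) {
        // The "compute engine": one dip per phase, results reported via a listener.
        BiConsumer<PhysicalPlan, ActionListener<Result>> runPhase =
            (plan, listener) -> listener.onResponse(new Result("rows from " + plan.description()));

        // Phase one computes the stats; its listener starts phase two, which
        // joins the stats back onto the main output - the INLINESTATS shape.
        runPhase.accept(new PhysicalPlan("stats phase"), new ActionListener<>() {
            @Override
            public void onResponse(Result stats) {
                runPhase.accept(new PhysicalPlan("join phase using " + stats.rows()), new ActionListener<>() {
                    @Override
                    public void onResponse(Result joined) {
                        System.out.println("final response: " + joined.rows());
                    }

                    @Override
                    public void onFailure(Exception e) {
                        e.printStackTrace();
                    }
                });
            }

            @Override
            public void onFailure(Exception e) {
                e.printStackTrace();
            }
        });
    }
}
```

Passing the phase runner in as a `BiConsumer` keeps the sequencing decision inside `EsqlSession#execute` while the transport layer stays in charge of how a single phase actually reaches the compute engine.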
--- .../xpack/esql/execution/PlanExecutor.java | 8 ++- .../xpack/esql/plugin/ComputeService.java | 9 ++-- .../esql/plugin/TransportEsqlQueryAction.java | 49 ++++++++++--------- .../xpack/esql/session/EsqlSession.java | 14 +++++- .../xpack/esql/session/Result.java | 17 ++++++- .../esql/stats/PlanExecutorMetricsTests.java | 12 +++-- 6 files changed, 73 insertions(+), 36 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index df67f4609c33e..4e07c3084ab7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -20,9 +20,12 @@ import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.session.EsqlSession; import org.elasticsearch.xpack.esql.session.IndexResolver; +import org.elasticsearch.xpack.esql.session.Result; import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.QueryMetric; +import java.util.function.BiConsumer; + import static org.elasticsearch.action.ActionListener.wrap; public class PlanExecutor { @@ -48,7 +51,8 @@ public void esql( String sessionId, EsqlConfiguration cfg, EnrichPolicyResolver enrichPolicyResolver, - ActionListener listener + BiConsumer> runPhase, + ActionListener listener ) { final var session = new EsqlSession( sessionId, @@ -63,7 +67,7 @@ public void esql( ); QueryMetric clientId = QueryMetric.fromString("rest"); metrics.total(clientId); - session.execute(request, wrap(listener::onResponse, ex -> { + session.execute(request, runPhase, wrap(listener::onResponse, ex -> { // TODO when we decide if we will differentiate Kibana from REST, this String value will likely come from the request metrics.failed(clientId); listener.onFailure(ex); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 4ebc4af258134..e28c8e8434643 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -72,6 +72,7 @@ import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.session.Result; import java.util.ArrayList; import java.util.Collections; @@ -89,8 +90,6 @@ * Computes the result of a {@link PhysicalPlan}. 
*/ public class ComputeService { - public record Result(List pages, List profiles) {} - private static final Logger LOGGER = LogManager.getLogger(ComputeService.class); private final SearchService searchService; private final BigArrays bigArrays; @@ -176,7 +175,7 @@ public void execute( rootTask, computeContext, coordinatorPlan, - listener.map(driverProfiles -> new Result(collectedPages, driverProfiles)) + listener.map(driverProfiles -> new Result(physicalPlan.output(), collectedPages, driverProfiles)) ); return; } else { @@ -201,7 +200,9 @@ public void execute( ); try ( Releasable ignored = exchangeSource.addEmptySink(); - RefCountingListener refs = new RefCountingListener(listener.map(unused -> new Result(collectedPages, collectedProfiles))) + RefCountingListener refs = new RefCountingListener( + listener.map(unused -> new Result(physicalPlan.output(), collectedPages, collectedProfiles)) + ) ) { // run compute on the coordinator exchangeSource.addCompletionListener(refs.acquire()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 9328992120c08..5a6812c969757 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -37,7 +37,9 @@ import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.execution.PlanExecutor; +import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.session.Result; import java.io.IOException; import java.time.ZoneOffset; @@ -45,6 +47,7 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.Executor; +import java.util.function.BiConsumer; import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; @@ -157,37 +160,37 @@ private void innerExecute(Task task, EsqlQueryRequest request, ActionListener> runPhase = (physicalPlan, resultListener) -> computeService.execute( + sessionId, + (CancellableTask) task, + physicalPlan, + configuration, + resultListener + ); + planExecutor.esql( request, sessionId, configuration, enrichPolicyResolver, - listener.delegateFailureAndWrap( - (delegate, physicalPlan) -> computeService.execute( - sessionId, - (CancellableTask) task, - physicalPlan, - configuration, - delegate.map(result -> { - List columns = physicalPlan.output() - .stream() - .map(c -> new ColumnInfoImpl(c.qualifiedName(), c.dataType().outputType())) - .toList(); - EsqlQueryResponse.Profile profile = configuration.profile() - ? 
new EsqlQueryResponse.Profile(result.profiles()) - : null; - if (task instanceof EsqlQueryTask asyncTask && request.keepOnCompletion()) { - String id = asyncTask.getExecutionId().getEncoded(); - return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), id, false, request.async()); - } else { - return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), request.async()); - } - }) - ) - ) + runPhase, + listener.map(result -> toResponse(task, request, configuration, result)) ); } + private EsqlQueryResponse toResponse(Task task, EsqlQueryRequest request, EsqlConfiguration configuration, Result result) { + List columns = result.schema() + .stream() + .map(c -> new ColumnInfoImpl(c.qualifiedName(), c.dataType().outputType())) + .toList(); + EsqlQueryResponse.Profile profile = configuration.profile() ? new EsqlQueryResponse.Profile(result.profiles()) : null; + if (task instanceof EsqlQueryTask asyncTask && request.keepOnCompletion()) { + String id = asyncTask.getExecutionId().getEncoded(); + return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), id, false, request.async()); + } + return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), request.async()); + } + /** * Returns the ID for this compute session. The ID is unique within the cluster, and is used * to identify the compute-session across nodes. The ID is just the TaskID of the task that diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 3119b328e8074..370de6bb2ce8e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -58,6 +58,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -110,10 +111,19 @@ public String sessionId() { return sessionId; } - public void execute(EsqlQueryRequest request, ActionListener listener) { + public void execute( + EsqlQueryRequest request, + BiConsumer> runPhase, + ActionListener listener + ) { LOGGER.debug("ESQL query:\n{}", request.query()); + LogicalPlan logicalPlan = parse(request.query(), request.params()); + logicalPlanToPhysicalPlan(logicalPlan, request, listener.delegateFailureAndWrap((l, r) -> runPhase.accept(r, l))); + } + + private void logicalPlanToPhysicalPlan(LogicalPlan logicalPlan, EsqlQueryRequest request, ActionListener listener) { optimizedPhysicalPlan( - parse(request.query(), request.params()), + logicalPlan, listener.map(plan -> EstimatesRowSize.estimateRowSize(0, plan.transformUp(FragmentExec.class, f -> { QueryBuilder filter = request.filter(); if (filter != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java index 7cbf3987af2cb..5abaa78f54196 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java @@ -7,8 +7,23 @@ package org.elasticsearch.xpack.esql.session; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.Page; +import 
org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import java.util.List; -public record Result(List columns, List> values) {} +/** + * Results from running a chunk of ESQL. + * @param schema "Schema" of the {@link Attribute}s that are produced by the {@link LogicalPlan} + * that was run. Each {@link Page} contains a {@link Block} of values for each + * attribute in this list. + * @param pages Actual values produced by running the ESQL. + * @param profiles {@link DriverProfile}s from all drivers that ran to produce the output. These + * are quite cheap to build, so we build them for all ESQL runs, regardless of if + * users have asked for them. But we only include them in the results if users ask + * for them. + */ +public record Result(List schema, List pages, List profiles) {} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 5883d41f32125..427c30311df0b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.session.IndexResolver; +import org.elasticsearch.xpack.esql.session.Result; import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; import org.junit.After; import org.junit.Before; @@ -33,6 +34,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.BiConsumer; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.hamcrest.Matchers.instanceOf; @@ -100,9 +102,10 @@ public void testFailedMetric() { var request = new EsqlQueryRequest(); // test a failed query: xyz field doesn't exist request.query("from test | stats m = max(xyz)"); - planExecutor.esql(request, randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, enrichResolver, new ActionListener<>() { + BiConsumer> runPhase = (p, r) -> fail("this shouldn't happen"); + planExecutor.esql(request, randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, enrichResolver, runPhase, new ActionListener<>() { @Override - public void onResponse(PhysicalPlan physicalPlan) { + public void onResponse(Result result) { fail("this shouldn't happen"); } @@ -119,9 +122,10 @@ public void onFailure(Exception e) { // fix the failing query: foo field does exist request.query("from test | stats m = max(foo)"); - planExecutor.esql(request, randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, enrichResolver, new ActionListener<>() { + runPhase = (p, r) -> r.onResponse(null); + planExecutor.esql(request, randomAlphaOfLength(10), EsqlTestUtils.TEST_CFG, enrichResolver, runPhase, new ActionListener<>() { @Override - public void onResponse(PhysicalPlan physicalPlan) {} + public void onResponse(Result result) {} @Override public void onFailure(Exception e) { From 0d31b328197216d7b029c62e00b68223b0dccfd7 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 5 Jul 2024 10:29:12 -0400 Subject: [PATCH 201/216] ESQL: Move `LogicalPlan` to esql proper (#110426) This moves LogicalPlan and all subclasses and references out of our esql-core and into esql-proper so that 
we can further simplify things. --- .../esql/core/analyzer/VerifierChecks.java | 31 --- .../xpack/esql/EsqlTestUtils.java | 2 +- .../xpack/esql/analysis/Analyzer.java | 9 +- .../xpack/esql/analysis}/AnalyzerRules.java | 4 +- .../xpack/esql/analysis/PreAnalyzer.java | 2 +- .../xpack/esql/analysis/Verifier.java | 20 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 8 +- .../xpack/esql/io/stream/PlanStreamInput.java | 2 +- .../esql/io/stream/PlanStreamOutput.java | 2 +- .../optimizer/LocalLogicalPlanOptimizer.java | 12 +- .../optimizer/LocalPhysicalPlanOptimizer.java | 2 +- .../esql/optimizer/LogicalPlanOptimizer.java | 6 +- .../xpack/esql/optimizer/LogicalVerifier.java | 2 +- .../xpack/esql/optimizer/OptimizerRules.java | 2 +- .../optimizer/PhysicalOptimizerRules.java | 2 +- .../esql/optimizer/rules/AddDefaultTopN.java | 6 +- .../BooleanFunctionEqualsElimination.java | 5 +- .../rules/BooleanSimplification.java | 2 +- .../rules/CombineDisjunctionsToIn.java | 4 +- .../esql/optimizer/rules/CombineEvals.java | 3 +- .../optimizer/rules/CombineProjections.java | 5 +- .../esql/optimizer/rules/ConstantFolding.java | 1 - .../rules/ConvertStringToByteRef.java | 1 - .../rules/DuplicateLimitAfterMvExpand.java | 11 +- .../xpack/esql/optimizer/rules/FoldNull.java | 1 - .../optimizer/rules/LiteralsOnTheRight.java | 1 - .../esql/optimizer/rules}/OptimizerRules.java | 216 +----------------- .../optimizer/rules/PartiallyFoldCase.java | 3 +- .../rules/PropagateEmptyRelation.java | 5 +- .../esql/optimizer/rules/PropagateEquals.java | 4 +- .../rules/PropagateEvalFoldables.java | 4 +- .../optimizer/rules/PropagateNullable.java | 1 - .../esql/optimizer/rules/PruneColumns.java | 4 +- .../esql/optimizer/rules/PruneEmptyPlans.java | 5 +- .../esql/optimizer/rules/PruneFilters.java | 57 ++++- .../rules/PruneLiteralsInOrderBy.java | 5 +- .../rules/PruneOrderByBeforeStats.java | 9 +- .../rules/PruneRedundantSortClauses.java | 5 +- .../rules/PushDownAndCombineFilters.java | 9 +- .../rules/PushDownAndCombineLimits.java | 7 +- .../rules/PushDownAndCombineOrderBy.java | 5 +- .../esql/optimizer/rules/PushDownEnrich.java | 3 +- .../esql/optimizer/rules/PushDownEval.java | 3 +- .../optimizer/rules/PushDownRegexExtract.java | 3 +- .../optimizer/rules/RemoveStatsOverride.java | 4 +- .../rules/ReplaceAliasingEvalWithProject.java | 2 +- .../rules/ReplaceLimitAndSortAsTopN.java | 7 +- .../rules/ReplaceLookupWithJoin.java | 3 +- .../ReplaceOrderByExpressionWithEval.java | 5 +- .../optimizer/rules/ReplaceRegexMatch.java | 5 +- .../ReplaceStatsAggExpressionWithEval.java | 3 +- .../ReplaceStatsNestedExpressionWithEval.java | 3 +- .../rules/ReplaceTrivialTypeConversions.java | 3 +- .../esql/optimizer/rules/SetAsOptimized.java | 2 +- .../rules/SimplifyComparisonsArithmetics.java | 5 +- .../rules/SkipQueryOnEmptyMappings.java | 3 +- .../optimizer/rules/SkipQueryOnLimitZero.java | 17 +- .../rules/SplitInWithFoldableValue.java | 1 - .../rules/SubstituteSpatialSurrogates.java | 1 - .../optimizer/rules/SubstituteSurrogates.java | 3 +- .../rules/TranslateMetricsAggregate.java | 3 +- .../xpack/esql/parser/EsqlParser.java | 2 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 8 +- .../xpack/esql/plan/logical/Aggregate.java | 2 - .../xpack/esql}/plan/logical/BinaryPlan.java | 2 +- .../xpack/esql/plan/logical/Dissect.java | 2 - .../xpack/esql/plan/logical/Drop.java | 2 - .../xpack/esql/plan/logical/Enrich.java | 2 - .../xpack/esql/plan/logical/EsRelation.java | 1 - .../esql/plan/logical/EsqlAggregate.java | 1 - .../xpack/esql/plan/logical/Eval.java 
| 2 - .../xpack/esql/plan/logical/Explain.java | 2 - .../xpack/esql}/plan/logical/Filter.java | 2 +- .../xpack/esql/plan/logical/Grok.java | 2 - .../xpack/esql/plan/logical/InlineStats.java | 2 - .../xpack/esql/plan/logical/Keep.java | 1 - .../xpack/esql}/plan/logical/LeafPlan.java | 2 +- .../xpack/esql}/plan/logical/Limit.java | 2 +- .../xpack/esql}/plan/logical/LogicalPlan.java | 2 +- .../xpack/esql/plan/logical/Lookup.java | 2 - .../xpack/esql/plan/logical/MvExpand.java | 2 - .../xpack/esql}/plan/logical/OrderBy.java | 2 +- .../xpack/esql/plan/logical/Project.java | 2 - .../xpack/esql/plan/logical/RegexExtract.java | 2 - .../xpack/esql/plan/logical/Rename.java | 2 - .../xpack/esql/plan/logical/Row.java | 2 - .../xpack/esql/plan/logical/TopN.java | 2 - .../xpack/esql}/plan/logical/UnaryPlan.java | 2 +- .../esql/plan/logical/UnresolvedRelation.java | 1 - .../xpack/esql/plan/logical/join/Join.java | 4 +- .../esql/plan/logical/local/EsqlProject.java | 2 +- .../plan/logical/local/LocalRelation.java | 2 +- .../esql/plan/logical/meta/MetaFunctions.java | 4 +- .../esql/plan/logical/show/ShowInfo.java | 4 +- .../esql/plan/physical/FragmentExec.java | 2 +- .../xpack/esql/planner/Mapper.java | 12 +- .../xpack/esql/planner/PlannerUtils.java | 10 +- .../xpack/esql/session/EsqlSession.java | 2 +- .../xpack/esql/stats/FeatureMetric.java | 6 +- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- .../xpack/esql/SerializationTestUtils.java | 2 +- .../esql/analysis/AnalyzerTestUtils.java | 2 +- .../xpack/esql/analysis/AnalyzerTests.java | 8 +- .../xpack/esql/analysis/ParsingTests.java | 2 +- .../esql/io/stream/PlanNamedTypesTests.java | 8 +- .../esql/io/stream/PlanStreamInputTests.java | 4 +- .../LocalLogicalPlanOptimizerTests.java | 8 +- .../optimizer/LogicalPlanOptimizerTests.java | 10 +- .../esql/optimizer/OptimizerRulesTests.java | 25 +- .../optimizer/PhysicalPlanOptimizerTests.java | 6 +- .../parser/AbstractStatementParserTests.java | 2 +- .../xpack/esql/parser/ExpressionTests.java | 4 +- .../esql/parser/StatementParserTests.java | 8 +- .../xpack/esql/plan/QueryPlanTests.java | 8 +- .../esql/plugin/DataNodeRequestTests.java | 2 +- .../esql/tree/EsqlNodeSubclassTests.java | 2 +- 116 files changed, 258 insertions(+), 515 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer => esql/src/main/java/org/elasticsearch/xpack/esql/analysis}/AnalyzerRules.java (97%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer => esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules}/OptimizerRules.java (63%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/plan/logical/BinaryPlan.java (95%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/plan/logical/Filter.java (97%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/plan/logical/LeafPlan.java (92%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/plan/logical/Limit.java (96%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => 
esql/src/main/java/org/elasticsearch/xpack/esql}/plan/logical/LogicalPlan.java (97%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/plan/logical/OrderBy.java (96%) rename x-pack/plugin/{esql-core/src/main/java/org/elasticsearch/xpack/esql/core => esql/src/main/java/org/elasticsearch/xpack/esql}/plan/logical/UnaryPlan.java (96%) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java deleted file mode 100644 index 36ce187d8600c..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/VerifierChecks.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.analyzer; - -import org.elasticsearch.xpack.esql.core.common.Failure; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; - -import java.util.Set; - -import static org.elasticsearch.xpack.esql.core.common.Failure.fail; -import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; - -public final class VerifierChecks { - - public static void checkFilterConditionType(LogicalPlan p, Set localFailures) { - if (p instanceof Filter) { - Expression condition = ((Filter) p).condition(); - if (condition.dataType() != BOOLEAN) { - localFailures.add(fail(condition, "Condition expression needs to be boolean, found [{}]", condition.dataType())); - } - } - } - -} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index d7e067658267f..2bf3baf845010 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -33,7 +33,6 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.index.EsIndex; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.session.Configuration; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -49,6 +48,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 4fcd37faa311a..cdb5935f9bd72 
100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -14,9 +14,8 @@ import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.core.analyzer.AnalyzerRules; -import org.elasticsearch.xpack.esql.core.analyzer.AnalyzerRules.BaseAnalyzerRule; -import org.elasticsearch.xpack.esql.core.analyzer.AnalyzerRules.ParameterizedAnalyzerRule; +import org.elasticsearch.xpack.esql.analysis.AnalyzerRules.BaseAnalyzerRule; +import org.elasticsearch.xpack.esql.analysis.AnalyzerRules.ParameterizedAnalyzerRule; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; @@ -38,8 +37,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.esql.core.rule.RuleExecutor; @@ -71,6 +68,8 @@ import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Keep; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/AnalyzerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerRules.java similarity index 97% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/AnalyzerRules.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerRules.java index ce188511fe7bc..3314129fae405 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/analyzer/AnalyzerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/AnalyzerRules.java @@ -5,13 +5,13 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.analyzer; +package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule; import org.elasticsearch.xpack.esql.core.rule.Rule; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.ArrayList; import java.util.Collection; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java index 7c37d5b8392c5..790142bef6a86 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.xpack.esql.core.analyzer.TableInfo; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 514a53b0933e9..9b90f411c4eb8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -20,10 +20,6 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; @@ -35,10 +31,15 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.Row; +import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -52,9 +53,9 @@ import java.util.function.Consumer; import java.util.stream.Stream; -import static 
org.elasticsearch.xpack.esql.core.analyzer.VerifierChecks.checkFilterConditionType; import static org.elasticsearch.xpack.esql.core.common.Failure.fail; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; public class Verifier { @@ -177,6 +178,15 @@ else if (p instanceof Lookup lookup) { return failures; } + private static void checkFilterConditionType(LogicalPlan p, Set localFailures) { + if (p instanceof Filter f) { + Expression condition = f.condition(); + if (condition.dataType() != BOOLEAN) { + localFailures.add(fail(condition, "Condition expression needs to be boolean, found [{}]", condition.dataType())); + } + } + } + private static void checkAggregate(LogicalPlan p, Set failures) { if (p instanceof Aggregate agg) { List groupings = agg.groupings(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 8034eba20690d..e4051523c7a5e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -26,10 +26,6 @@ import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.Order; import org.elasticsearch.xpack.esql.core.index.EsIndex; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -38,9 +34,13 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.join.Join; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index be2a9454b3bef..0633595a5796d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -24,9 +24,9 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.Column; import org.elasticsearch.xpack.esql.core.expression.NameId; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader; 
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.session.EsqlConfiguration;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java
index 58cd2465e1584..674476ec4f736 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java
@@ -19,8 +19,8 @@
 import org.elasticsearch.compute.data.LongBigArrayBlock;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.xpack.esql.Column;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.join.Join;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.session.EsqlConfiguration;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java
index ba5e8316a666c..9a2ae742c2feb 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java
@@ -21,11 +21,6 @@
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates;
 import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
-import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule;
 import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor;
 import org.elasticsearch.xpack.esql.core.rule.Rule;
@@ -34,10 +29,15 @@
 import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Count;
 import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;
+import org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules;
 import org.elasticsearch.xpack.esql.optimizer.rules.PropagateEmptyRelation;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.EsRelation;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.Filter;
+import org.elasticsearch.xpack.esql.plan.logical.Limit;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
 import org.elasticsearch.xpack.esql.plan.logical.TopN;
@@ -54,9 +54,9 @@
 import static java.util.Arrays.asList;
 import static java.util.Collections.emptySet;
-import static org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP;
 import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.cleanup;
 import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.operators;
+import static org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules.TransformDirection.UP;

 public class LocalLogicalPlanOptimizer extends ParameterizedRuleExecutor<LogicalPlan, LocalLogicalOptimizerContext> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
index 9447e018bc142..1b40a1c2b02ad 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
@@ -92,7 +92,7 @@
 import static java.util.Collections.emptyList;
 import static java.util.Collections.singletonList;
 import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitAnd;
-import static org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP;
+import static org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules.TransformDirection.UP;
 import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType.COUNT;

 public class LocalPhysicalPlanOptimizer extends ParameterizedRuleExecutor<PhysicalPlan, LocalPhysicalOptimizerContext> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java
index ca4b5d17deed3..284f264b85e1c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java
@@ -17,9 +17,6 @@
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.Order;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule;
 import org.elasticsearch.xpack.esql.core.rule.ParameterizedRuleExecutor;
 import org.elasticsearch.xpack.esql.optimizer.rules.AddDefaultTopN;
@@ -68,7 +65,10 @@
 import org.elasticsearch.xpack.esql.optimizer.rules.SubstituteSurrogates;
 import org.elasticsearch.xpack.esql.optimizer.rules.TranslateMetricsAggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier;
 import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java
index 2387a4a210de3..007fb3939db0c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java
@@ -9,8 +9,8 @@
 import org.elasticsearch.xpack.esql.capabilities.Validatable;
 import org.elasticsearch.xpack.esql.core.common.Failures;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.optimizer.OptimizerRules.LogicalPlanDependencyCheck;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 public final class LogicalVerifier {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java
index 4c5d9efb449f7..ecd83fbba022c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java
@@ -11,11 +11,11 @@
 import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.plan.QueryPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
 import org.elasticsearch.xpack.esql.plan.logical.EsRelation;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.MvExpand;
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
 import org.elasticsearch.xpack.esql.plan.logical.Row;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java
index 1def5a4133a3f..c669853d3357e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalOptimizerRules.java
@@ -8,10 +8,10 @@
 package org.elasticsearch.xpack.esql.optimizer;

 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection;
 import org.elasticsearch.xpack.esql.core.rule.ParameterizedRule;
 import org.elasticsearch.xpack.esql.core.rule.Rule;
 import org.elasticsearch.xpack.esql.core.util.ReflectionUtils;
+import org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules.TransformDirection;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;

 public class PhysicalOptimizerRules {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/AddDefaultTopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/AddDefaultTopN.java
index 28a7ba4bf7084..9208eba740100 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/AddDefaultTopN.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/AddDefaultTopN.java
@@ -8,14 +8,14 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.plan.logical.EsRelation;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.TopN;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;

 /**
  * This adds an explicit TopN node to a plan that only has an OrderBy right before Lucene.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java
index cf62f9219f3c8..1cdc2c02c8469 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanFunctionEqualsElimination.java
@@ -21,11 +21,10 @@
  * This rule must always be placed after {@link LiteralsOnTheRight}
  * since it looks at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions.
  */
-public final class BooleanFunctionEqualsElimination extends
-    org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule<BinaryComparison> {
+public final class BooleanFunctionEqualsElimination extends OptimizerRules.OptimizerExpressionRule<BinaryComparison> {

     public BooleanFunctionEqualsElimination() {
-        super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP);
+        super(OptimizerRules.TransformDirection.UP);
     }

     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanSimplification.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanSimplification.java
index b01525cc447fc..2a3f7fb9d1244 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanSimplification.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/BooleanSimplification.java
@@ -9,7 +9,7 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;

-public final class BooleanSimplification extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification {
+public final class BooleanSimplification extends OptimizerRules.BooleanSimplification {

     public BooleanSimplification() {
         super();
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java
index c34252300350c..2dc2f0e504303 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineDisjunctionsToIn.java
@@ -35,9 +35,9 @@
  * This rule does NOT check for type compatibility as that phase has been
  * already be verified in the analyzer.
  */
-public final class CombineDisjunctionsToIn extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule<Or> {
+public final class CombineDisjunctionsToIn extends OptimizerRules.OptimizerExpressionRule<Or> {
     public CombineDisjunctionsToIn() {
-        super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP);
+        super(OptimizerRules.TransformDirection.UP);
     }

     protected In createIn(Expression key, List<Expression> values, ZoneId zoneId) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineEvals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineEvals.java
index 40e9836d0afa1..f8210d06e4439 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineEvals.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineEvals.java
@@ -7,10 +7,9 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.util.CollectionUtils;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 /**
  * Combine multiple Evals into one in order to reduce the number of nodes in a plan.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineProjections.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineProjections.java
index 2070139519ea0..3c0ac9056c8c5 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineProjections.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/CombineProjections.java
@@ -15,11 +15,10 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;

 import java.util.ArrayList;
 import java.util.List;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java
index f2638333c9601..2178013c42148 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConstantFolding.java
@@ -9,7 +9,6 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;

 public final class ConstantFolding extends OptimizerRules.OptimizerExpressionRule<Expression> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConvertStringToByteRef.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConvertStringToByteRef.java
index 384f56d96de73..a1969df3f898a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConvertStringToByteRef.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ConvertStringToByteRef.java
@@ -10,7 +10,6 @@
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;

 import java.util.ArrayList;
 import java.util.List;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/DuplicateLimitAfterMvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/DuplicateLimitAfterMvExpand.java
index 6b944bf7adf4f..ab1dc407a7a4a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/DuplicateLimitAfterMvExpand.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/DuplicateLimitAfterMvExpand.java
@@ -9,18 +9,17 @@
 import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
-import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.Filter;
+import org.elasticsearch.xpack.esql.plan.logical.Limit;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.MvExpand;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;

 public final class DuplicateLimitAfterMvExpand extends OptimizerRules.OptimizerRule<Limit> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/FoldNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/FoldNull.java
index 25ad5e3966f21..6e01811b8527c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/FoldNull.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/FoldNull.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;

 public class FoldNull extends OptimizerRules.FoldNull {
     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java
index 528fe65766972..36d39e0ee1c73 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/LiteralsOnTheRight.java
@@ -9,7 +9,6 @@
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;

 public final class LiteralsOnTheRight extends OptimizerRules.OptimizerExpressionRule<BinaryOperator<?, ?, ?, ?>> {
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/OptimizerRules.java
similarity index 63%
rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java
rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/OptimizerRules.java
index ba19a73f91c06..6f6260fd0de27 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRules.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/OptimizerRules.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.esql.core.optimizer;
+package org.elasticsearch.xpack.esql.optimizer.rules;

 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
@@ -12,36 +12,24 @@
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.Nullability;
-import org.elasticsearch.xpack.esql.core.expression.function.Function;
 import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction;
-import org.elasticsearch.xpack.esql.core.expression.function.scalar.SurrogateFunction;
 import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryPredicate;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
-import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
 import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull;
 import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull;
-import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison;
-import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Equals;
 import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.In;
-import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.NotEquals;
-import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
-import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.rule.Rule;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.util.ReflectionUtils;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

-import java.time.ZoneId;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.function.BiFunction;

@@ -56,34 +44,6 @@
 import static org.elasticsearch.xpack.esql.core.util.CollectionUtils.combine;

 public final class OptimizerRules {
-
-    /**
-     * This rule must always be placed after LiteralsOnTheRight, since it looks at TRUE/FALSE literals' existence
-     * on the right hand-side of the {@link Equals}/{@link NotEquals} expressions.
-     */
-    public static final class BooleanFunctionEqualsElimination extends OptimizerExpressionRule<BinaryComparison> {
-
-        public BooleanFunctionEqualsElimination() {
-            super(TransformDirection.UP);
-        }
-
-        @Override
-        protected Expression rule(BinaryComparison bc) {
-            if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) {
-                // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant
-
-                if (TRUE.equals(bc.right())) {
-                    return bc instanceof Equals ? bc.left() : new Not(bc.left().source(), bc.left());
-                }
-                if (FALSE.equals(bc.right())) {
-                    return bc instanceof Equals ? new Not(bc.left().source(), bc.left()) : bc.left();
-                }
-            }
-
-            return bc;
-        }
-    }
-
     public static class BooleanSimplification extends OptimizerExpressionRule<ScalarFunction> {

         public BooleanSimplification() {
@@ -220,178 +180,6 @@ protected Expression maybeSimplifyNegatable(Expression e) {
         }
     }
-    /**
-     * Combine disjunctions on the same field into an In expression.
-     * This rule looks for both simple equalities:
-     * 1. a == 1 OR a == 2 becomes a IN (1, 2)
-     * and combinations of In
-     * 2. a == 1 OR a IN (2) becomes a IN (1, 2)
-     * 3. a IN (1) OR a IN (2) becomes a IN (1, 2)
-     *
-     * This rule does NOT check for type compatibility as that phase has been
-     * already be verified in the analyzer.
-     */
-    public static class CombineDisjunctionsToIn extends OptimizerExpressionRule<Or> {
-        public CombineDisjunctionsToIn() {
-            super(TransformDirection.UP);
-        }
-
-        @Override
-        protected Expression rule(Or or) {
-            Expression e = or;
-            // look only at equals and In
-            List<Expression> exps = splitOr(e);
-
-            Map<Expression, Set<Expression>> found = new LinkedHashMap<>();
-            ZoneId zoneId = null;
-            List<Expression> ors = new LinkedList<>();
-
-            for (Expression exp : exps) {
-                if (exp instanceof Equals eq) {
-                    // consider only equals against foldables
-                    if (eq.right().foldable()) {
-                        found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right());
-                    } else {
-                        ors.add(exp);
-                    }
-                    if (zoneId == null) {
-                        zoneId = eq.zoneId();
-                    }
-                } else if (exp instanceof In in) {
-                    found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list());
-                    if (zoneId == null) {
-                        zoneId = in.zoneId();
-                    }
-                } else {
-                    ors.add(exp);
-                }
-            }
-
-            if (found.isEmpty() == false) {
-                // combine equals alongside the existing ors
-                final ZoneId finalZoneId = zoneId;
-                found.forEach(
-                    (k, v) -> { ors.add(v.size() == 1 ? createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); }
-                );

-                Expression combineOr = combineOr(ors);
-                // check the result semantically since the result might different in order
-                // but be actually the same which can trigger a loop
-                // e.g. a == 1 OR a == 2 OR null --> null OR a in (1,2) --> literalsOnTheRight --> cycle
-                if (e.semanticEquals(combineOr) == false) {
-                    e = combineOr;
-                }
-            }
-
-            return e;
-        }
-
-        protected Equals createEquals(Expression k, Set<Expression> v, ZoneId finalZoneId) {
-            return new Equals(k.source(), k, v.iterator().next(), finalZoneId);
-        }
-
-        protected In createIn(Expression key, List<Expression> values, ZoneId zoneId) {
-            return new In(key.source(), key, values, zoneId);
-        }
-    }
-
-    public static class ReplaceSurrogateFunction extends OptimizerExpressionRule<Expression> {
-
-        public ReplaceSurrogateFunction() {
-            super(TransformDirection.DOWN);
-        }
-
-        @Override
-        protected Expression rule(Expression e) {
-            if (e instanceof SurrogateFunction) {
-                e = ((SurrogateFunction) e).substitute();
-            }
-            return e;
-        }
-    }
-
-    public abstract static class PruneFilters extends OptimizerRule<Filter> {
-
-        @Override
-        protected LogicalPlan rule(Filter filter) {
-            Expression condition = filter.condition().transformUp(BinaryLogic.class, PruneFilters::foldBinaryLogic);
-
-            if (condition instanceof Literal) {
-                if (TRUE.equals(condition)) {
-                    return filter.child();
-                }
-                if (FALSE.equals(condition) || Expressions.isNull(condition)) {
-                    return skipPlan(filter);
-                }
-            }
-
-            if (condition.equals(filter.condition()) == false) {
-                return new Filter(filter.source(), filter.child(), condition);
-            }
-            return filter;
-        }
-
-        protected abstract LogicalPlan skipPlan(Filter filter);
-
-        private static Expression foldBinaryLogic(BinaryLogic binaryLogic) {
-            if (binaryLogic instanceof Or or) {
-                boolean nullLeft = Expressions.isNull(or.left());
-                boolean nullRight = Expressions.isNull(or.right());
-                if (nullLeft && nullRight) {
-                    return new Literal(binaryLogic.source(), null, DataType.NULL);
-                }
-                if (nullLeft) {
-                    return or.right();
-                }
-                if (nullRight) {
-                    return or.left();
-                }
-            }
-            if (binaryLogic instanceof And and) {
-                if (Expressions.isNull(and.left()) || Expressions.isNull(and.right())) {
-                    return new Literal(binaryLogic.source(), null, DataType.NULL);
-                }
-            }
-            return binaryLogic;
-        }
-    }
-
-    // NB: it is important to start replacing casts from the bottom to properly replace aliases
-    public abstract static class PruneCast<C extends Expression> extends Rule<LogicalPlan, LogicalPlan> {
-
-        private final Class<C> castType;
-
-        public PruneCast(Class<C> castType) {
-            this.castType = castType;
-        }
-
-        @Override
-        public final LogicalPlan apply(LogicalPlan plan) {
-            return rule(plan);
-        }
-
-        protected final LogicalPlan rule(LogicalPlan plan) {
-            // eliminate redundant casts
-            return plan.transformExpressionsUp(castType, this::maybePruneCast);
-        }
-
-        protected abstract Expression maybePruneCast(C cast);
-    }
-
-    public abstract static class SkipQueryOnLimitZero extends OptimizerRule<Limit> {
-        @Override
-        protected LogicalPlan rule(Limit limit) {
-            if (limit.limit().foldable()) {
-                if (Integer.valueOf(0).equals((limit.limit().fold()))) {
-                    return skipPlan(limit);
-                }
-            }
-            return limit;
-        }
-
-        protected abstract LogicalPlan skipPlan(Limit limit);
-    }
-
     public static class FoldNull extends OptimizerExpressionRule<Expression> {

         public FoldNull() {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PartiallyFoldCase.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PartiallyFoldCase.java
index 6b900d91eb061..78435f852982e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PartiallyFoldCase.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PartiallyFoldCase.java
@@ -8,10 +8,9 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
 import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case;

-import static org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN;
+import static org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules.TransformDirection.DOWN;

 /**
  * Fold the arms of {@code CASE} statements.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEmptyRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEmptyRelation.java
index 8a3281dd7df81..c57e490423ce8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEmptyRelation.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEmptyRelation.java
@@ -13,13 +13,12 @@
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Count;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier;
 import org.elasticsearch.xpack.esql.planner.PlannerUtils;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java
index 5f08363abdbaf..8e5d203942c7a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEquals.java
@@ -35,10 +35,10 @@
  * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false.
  * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality.
  */
-public final class PropagateEquals extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule<BinaryLogic> {
+public final class PropagateEquals extends OptimizerRules.OptimizerExpressionRule<BinaryLogic> {

     public PropagateEquals() {
-        super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN);
+        super(OptimizerRules.TransformDirection.DOWN);
     }

     public Expression rule(BinaryLogic e) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEvalFoldables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEvalFoldables.java
index 872bff80926d6..9231105c9b663 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEvalFoldables.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateEvalFoldables.java
@@ -12,10 +12,10 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
-import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.rule.Rule;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.Filter;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 /**
  * Replace any reference attribute with its source, if it does not affect the result.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateNullable.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateNullable.java
index 73ea21f9c8191..08c560c326e81 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateNullable.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PropagateNullable.java
@@ -9,7 +9,6 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
 import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;

 import java.util.ArrayList;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneColumns.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneColumns.java
index 9403e3996ec49..baeabb534aa3c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneColumns.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneColumns.java
@@ -13,12 +13,12 @@
 import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.rule.Rule;
 import org.elasticsearch.xpack.esql.core.util.Holder;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.Limit;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneEmptyPlans.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneEmptyPlans.java
index 5c9ef44207366..739d59d8b0df6 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneEmptyPlans.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneEmptyPlans.java
@@ -7,10 +7,9 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;

 public final class PruneEmptyPlans extends OptimizerRules.OptimizerRule<UnaryPlan> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneFilters.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneFilters.java
index 72df4261663e5..7e9ff7c5f5f02 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneFilters.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneFilters.java
@@ -7,15 +7,60 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.expression.Expressions;
+import org.elasticsearch.xpack.esql.core.expression.Literal;
+import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
+import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic;
+import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
+import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
+import org.elasticsearch.xpack.esql.plan.logical.Filter;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

-public final class PruneFilters extends OptimizerRules.PruneFilters {
+import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE;
+import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE;

+public final class PruneFilters extends OptimizerRules.OptimizerRule<Filter> {
     @Override
-    protected LogicalPlan skipPlan(Filter filter) {
-        return LogicalPlanOptimizer.skipPlan(filter);
+    protected LogicalPlan rule(Filter filter) {
+        Expression condition = filter.condition().transformUp(BinaryLogic.class, PruneFilters::foldBinaryLogic);
+
+        if (condition instanceof Literal) {
+            if (TRUE.equals(condition)) {
+                return filter.child();
+            }
+            if (FALSE.equals(condition) || Expressions.isNull(condition)) {
+                return LogicalPlanOptimizer.skipPlan(filter);
+            }
+        }
+
+        if (condition.equals(filter.condition()) == false) {
+            return new Filter(filter.source(), filter.child(), condition);
+        }
+        return filter;
     }
+
+    private static Expression foldBinaryLogic(BinaryLogic binaryLogic) {
+        if (binaryLogic instanceof Or or) {
+            boolean nullLeft = Expressions.isNull(or.left());
+            boolean nullRight = Expressions.isNull(or.right());
+            if (nullLeft && nullRight) {
+                return new Literal(binaryLogic.source(), null, DataType.NULL);
+            }
+            if (nullLeft) {
+                return or.right();
+            }
+            if (nullRight) {
+                return or.left();
+            }
+        }
+        if (binaryLogic instanceof And and) {
+            if (Expressions.isNull(and.left()) || Expressions.isNull(and.right())) {
+                return new Literal(binaryLogic.source(), null, DataType.NULL);
+            }
+        }
+        return binaryLogic;
+    }
+
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java
index 591cfe043c00d..1fe67c2c435c2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneLiteralsInOrderBy.java
@@ -8,9 +8,8 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

 import org.elasticsearch.xpack.esql.core.expression.Order;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;

 import java.util.ArrayList;
 import java.util.List;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneOrderByBeforeStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneOrderByBeforeStats.java
index 690bc92b1c338..f2ef524f2c91e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneOrderByBeforeStats.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneOrderByBeforeStats.java
@@ -7,16 +7,15 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.Filter;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;

 public final class PruneOrderByBeforeStats extends OptimizerRules.OptimizerRule<OrderBy> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneRedundantSortClauses.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneRedundantSortClauses.java
index 3a9421ee7f159..dc68ae5981429 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneRedundantSortClauses.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PruneRedundantSortClauses.java
@@ -9,9 +9,8 @@
 import org.elasticsearch.xpack.esql.core.expression.ExpressionSet;
 import org.elasticsearch.xpack.esql.core.expression.Order;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;

 import java.util.ArrayList;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineFilters.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineFilters.java
index 647c5c3730157..48013e113fe43 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineFilters.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineFilters.java
@@ -12,18 +12,17 @@
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.Filter;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.Filter;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;

 import java.util.ArrayList;
 import java.util.List;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineLimits.java
index 46fb654d03760..62ecf9ccd09be 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineLimits.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineLimits.java
@@ -8,16 +8,15 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

 import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.Limit;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.MvExpand;
 import org.elasticsearch.xpack.esql.plan.logical.Project;
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
+import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.join.Join;
 import org.elasticsearch.xpack.esql.plan.logical.join.JoinType;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineOrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineOrderBy.java
index f01616953427d..286695abda25b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineOrderBy.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownAndCombineOrderBy.java
@@ -7,10 +7,9 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Project;

 public final class PushDownAndCombineOrderBy extends OptimizerRules.OptimizerRule<OrderBy> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEnrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEnrich.java
index f6a0154108f2d..7185f63964c34 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEnrich.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEnrich.java
@@ -7,10 +7,9 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.plan.logical.Enrich;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEval.java
index b936e5569c950..92c25a60bba77 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEval.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownEval.java
@@ -7,10 +7,9 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 import static org.elasticsearch.xpack.esql.core.expression.Expressions.asAttributes;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownRegexExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownRegexExtract.java
index f247d0a631b29..d24a61f89dd7f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownRegexExtract.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/PushDownRegexExtract.java
@@ -7,9 +7,8 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;

 public final class PushDownRegexExtract extends OptimizerRules.OptimizerRule<RegexExtract> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/RemoveStatsOverride.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/RemoveStatsOverride.java
index cbcde663f8b14..5592a04e2f813 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/RemoveStatsOverride.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/RemoveStatsOverride.java
@@ -8,11 +8,11 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

 import org.elasticsearch.common.util.set.Sets;
-import org.elasticsearch.xpack.esql.core.analyzer.AnalyzerRules;
+import org.elasticsearch.xpack.esql.analysis.AnalyzerRules;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 import java.util.ArrayList;
 import java.util.List;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceAliasingEvalWithProject.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceAliasingEvalWithProject.java
index 2bbfeaac965ef..34b75cd89f68c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceAliasingEvalWithProject.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceAliasingEvalWithProject.java
@@ -12,11 +12,11 @@
 import org.elasticsearch.xpack.esql.core.expression.AttributeMap;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.rule.Rule;
 import org.elasticsearch.xpack.esql.core.util.Holder;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Project;

 import java.util.ArrayList;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLimitAndSortAsTopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLimitAndSortAsTopN.java
index ec912735f8451..6394d11bb68c8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLimitAndSortAsTopN.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLimitAndSortAsTopN.java
@@ -7,10 +7,9 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
+import org.elasticsearch.xpack.esql.plan.logical.Limit;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.TopN;

 public final class ReplaceLimitAndSortAsTopN extends OptimizerRules.OptimizerRule<Limit> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLookupWithJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLookupWithJoin.java
index f6c8f4a59a70c..f258ea97bfa33 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLookupWithJoin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceLookupWithJoin.java
@@ -7,8 +7,7 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Lookup;
 import org.elasticsearch.xpack.esql.plan.logical.join.Join;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceOrderByExpressionWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceOrderByExpressionWithEval.java
index 476da7476f7fb..02fc98428f14a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceOrderByExpressionWithEval.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceOrderByExpressionWithEval.java
@@ -10,10 +10,9 @@
 import org.elasticsearch.xpack.esql.core.expression.Alias;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Order;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.esql.plan.logical.Project;

 import java.util.ArrayList;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatch.java
index 5cba7349debfd..cc18940e68924 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatch.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceRegexMatch.java
@@ -15,11 +15,10 @@
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals;

-public final class ReplaceRegexMatch extends org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule<
-    RegexMatch<?>> {
+public final class ReplaceRegexMatch extends OptimizerRules.OptimizerExpressionRule<RegexMatch<?>> {

     public ReplaceRegexMatch() {
-        super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.DOWN);
+        super(OptimizerRules.TransformDirection.DOWN);
     }

     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsAggExpressionWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsAggExpressionWithEval.java
index 012d6e307df6c..31b543cd115df 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsAggExpressionWithEval.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsAggExpressionWithEval.java
@@ -12,14 +12,13 @@
 import org.elasticsearch.xpack.esql.core.expression.AttributeMap;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.util.CollectionUtils;
 import org.elasticsearch.xpack.esql.core.util.Holder;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.Project;

 import java.util.ArrayList;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsNestedExpressionWithEval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsNestedExpressionWithEval.java
index 99b0c8047f2ba..0979b745a6607 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsNestedExpressionWithEval.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceStatsNestedExpressionWithEval.java
@@ -11,13 +11,12 @@
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.util.Holder;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingFunction;
 import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 import java.util.ArrayList;
 import java.util.HashMap;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceTrivialTypeConversions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceTrivialTypeConversions.java
index 2763c71c4bcb6..dc877a99010f8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceTrivialTypeConversions.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/ReplaceTrivialTypeConversions.java
@@ -9,10 +9,9 @@
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction;
 import org.elasticsearch.xpack.esql.plan.logical.Eval;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 /**
  * Replace type converting eval with aliasing eval when type change does not occur.
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java
index 168270b68db2d..89d2e7613d2c7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SetAsOptimized.java
@@ -7,8 +7,8 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.core.rule.Rule;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;

 public final class SetAsOptimized extends Rule<LogicalPlan, LogicalPlan> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java
index 151d11fa575ae..4ef069ea16d04 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SimplifyComparisonsArithmetics.java
@@ -32,12 +32,11 @@
 /**
  * Simplifies arithmetic expressions with BinaryComparisons and fixed point fields, such as: (int + 2) / 3 > 4 => int > 10
  */
-public final class SimplifyComparisonsArithmetics extends
-    org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.OptimizerExpressionRule<BinaryComparison> {
+public final class SimplifyComparisonsArithmetics extends OptimizerRules.OptimizerExpressionRule<BinaryComparison> {
     BiFunction<DataType, DataType, Boolean> typesCompatible;

     public SimplifyComparisonsArithmetics(BiFunction<DataType, DataType, Boolean> typesCompatible) {
-        super(org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.TransformDirection.UP);
+        super(OptimizerRules.TransformDirection.UP);
         this.typesCompatible = typesCompatible;
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnEmptyMappings.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnEmptyMappings.java
index 7ec215db65626..99efacd4ea39a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnEmptyMappings.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnEmptyMappings.java
@@ -7,9 +7,8 @@
 package org.elasticsearch.xpack.esql.optimizer.rules;

-import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules;
-import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.logical.EsRelation;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnLimitZero.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnLimitZero.java index 7cb4f2926045d..199520d648a26 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnLimitZero.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SkipQueryOnLimitZero.java @@ -7,15 +7,18 @@ package org.elasticsearch.xpack.esql.optimizer.rules; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -public final class SkipQueryOnLimitZero extends OptimizerRules.SkipQueryOnLimitZero { - +public final class SkipQueryOnLimitZero extends OptimizerRules.OptimizerRule<Limit> { @Override - protected LogicalPlan skipPlan(Limit limit) { - return LogicalPlanOptimizer.skipPlan(limit); + protected LogicalPlan rule(Limit limit) { + if (limit.limit().foldable()) { + if (Integer.valueOf(0).equals(limit.limit().fold())) { + return LogicalPlanOptimizer.skipPlan(limit); + } + } + return limit; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SplitInWithFoldableValue.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SplitInWithFoldableValue.java index c762f396a6f43..1d4e90fe0d5ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SplitInWithFoldableValue.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SplitInWithFoldableValue.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSpatialSurrogates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSpatialSurrogates.java index c5293785bf1ba..e6501452eeb65 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSpatialSurrogates.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSpatialSurrogates.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.optimizer.rules; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSurrogates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSurrogates.java index fa4049b0e5a3a..2307f6324e942 100644 ---
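For context on the SkipQueryOnLimitZero hunk earlier in this patch: instead of inheriting the zero-limit detection from the removed core base class, the rule now checks directly that the LIMIT expression is foldable and folds to the constant 0 before short-circuiting the plan. A self-contained sketch of just that guard, using hypothetical stand-ins for the planner types:

```java
// Hypothetical stand-ins; only the guard logic mirrors the patched rule.
interface FoldableExpr {
    boolean foldable(); // true when the expression is a compile-time constant

    Object fold();      // the constant value, e.g. Integer 0 for "LIMIT 0"
}

record LimitNode(FoldableExpr limit) {
    // True when the query can be short-circuited to an empty result,
    // i.e. the limit folds to exactly 0.
    boolean shouldSkip() {
        return limit.foldable() && Integer.valueOf(0).equals(limit.fold());
    }

    public static void main(String[] args) {
        FoldableExpr zero = new FoldableExpr() {
            public boolean foldable() { return true; }
            public Object fold() { return 0; }
        };
        System.out.println(new LimitNode(zero).shouldSkip()); // prints "true"
    }
}
```

Testing foldable() first keeps the rule away from limits that depend on runtime values, and comparing via equals() against a boxed Integer tolerates fold() returning an Object rather than a primitive.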
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSurrogates.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/SubstituteSurrogates.java @@ -15,13 +15,12 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Rate; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java index 88486bcb864dc..17b38044c1656 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java @@ -16,8 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.FromPartial; @@ -27,6 +25,7 @@ import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.ArrayList; import java.util.HashMap; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java index ddf6031445f7f..70daa5a535fa7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java @@ -20,7 +20,7 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.esql.core.parser.CaseChangingCharStream; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.BitSet; import java.util.function.BiFunction; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index fee51c40a2525..9ee5931c85c36 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -31,10 +31,6 @@ import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; import org.elasticsearch.xpack.esql.core.parser.ParserUtils; import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -49,11 +45,15 @@ import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Explain; +import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.InlineStats; import org.elasticsearch.xpack.esql.plan.logical.Keep; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java index bc7282857dbbe..5ab483e60d7b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Aggregate.java @@ -14,8 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/BinaryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java similarity index 95% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/BinaryPlan.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java index 051c3d7946b4b..579b67eb891ac 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/BinaryPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/BinaryPlan.java @@ -4,7 +4,7 @@ * 2.0; you may not 
use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.esql.core.plan.logical; +package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java index 1307d1870bba4..c0c564b1b36eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Dissect.java @@ -10,8 +10,6 @@ import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java index 2946287ae21f0..d1c5d70018d91 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Drop.java @@ -9,8 +9,6 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index f418ab5da1c9d..a4d553eae4749 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -14,8 +14,6 @@ import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index 08916c14e91bf..382838a5968cc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -10,7 +10,6 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.index.EsIndex; -import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import 
org.elasticsearch.xpack.esql.core.tree.NodeUtils; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java index 7f16ecd24dc1a..cc72823507f02 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlAggregate.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java index bfe11c3d33d87..20117a873c143 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Eval.java @@ -10,8 +10,6 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java index 86f3e0bdf349a..8d2640a43f38c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Explain.java @@ -9,8 +9,6 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Filter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java similarity index 97% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Filter.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java index a09ffb3e07c96..46fafe57e7d26 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Filter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Filter.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.plan.logical; +package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java index 0c1c400f3ab4d..e084f6d3e5e3a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Grok.java @@ -15,8 +15,6 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 4e7dc70904189..46ec56223384c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -12,8 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java index a4e733437e80f..c1c8c9aff5ca6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Keep.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/LeafPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LeafPlan.java similarity index 92% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/LeafPlan.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LeafPlan.java index 4def8356b316a..d21b61a81cd9e 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/LeafPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LeafPlan.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 
2.0. */ -package org.elasticsearch.xpack.esql.core.plan.logical; +package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Limit.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java similarity index 96% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Limit.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java index 610572f1e73ed..df5e1cf23275c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/Limit.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Limit.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.esql.core.plan.logical; +package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java similarity index 97% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/LogicalPlan.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index 56e09b4e1189a..0397183c6a6c3 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/LogicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.plan.logical; +package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index f28a1d11a5990..6893935f20b5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -11,8 +11,6 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index 869d8d7dc3a26..5e9dca26a6863 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -9,8 +9,6 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/OrderBy.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java similarity index 96% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/OrderBy.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java index c13b3a028f0e8..68d089980074c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/OrderBy.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/OrderBy.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.plan.logical; +package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Order; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java index fe28ddcc43b40..d3896b1dfc844 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Project.java @@ -10,8 +10,6 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Functions; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java index 5bf45fc0f61ad..649173f11dfaf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/RegexExtract.java @@ -9,8 +9,6 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java index 7d99c566aa0c7..5e4b45d7127fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Rename.java @@ -8,8 +8,6 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java index 9af3e08a6734b..30e16d9e1b227 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Row.java @@ -11,8 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan; -import 
org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java index ac576eaa2cb96..227d7785804d4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/TopN.java @@ -10,8 +10,6 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Order; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/UnaryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnaryPlan.java similarity index 96% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/UnaryPlan.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnaryPlan.java index 75ce38127394e..ea9a760ef5dc4 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plan/logical/UnaryPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnaryPlan.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.plan.logical; +package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index eb6627bbdd0f8..af19bc87f2c54 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index d6d328686d8f1..79278995b29bd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -12,12 +12,12 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.plan.logical.BinaryPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.plan.logical.BinaryPlan; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/EsqlProject.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/EsqlProject.java index 03a9c2b68b327..e359c6f928f7c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/EsqlProject.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/EsqlProject.java @@ -8,10 +8,10 @@ package org.elasticsearch.xpack.esql.plan.logical.local; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java index 862098621e9ee..195eb3b6304e4 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelation.java @@ -7,11 +7,11 @@ package org.elasticsearch.xpack.esql.plan.logical.local; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.plan.logical.LeafPlan; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java index f137cf392f8ad..9ac9ccdf2a876 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java @@ -11,11 +11,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.plan.logical.LeafPlan; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.ArrayList; import java.util.Arrays; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java index 4867d8ca77a39..6e98df32580ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowInfo.java @@ -11,10 +11,10 @@ import org.elasticsearch.Build; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LeafPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.plan.logical.LeafPlan; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java index 95cd732eabd45..5c01658760632 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java @@ -9,9 +9,9 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import 
org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.List; import java.util.Objects; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 5ba2a205d52d0..84ed4663496de 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -9,23 +9,23 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.plan.logical.BinaryPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.BinaryPlan; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index a729cec893126..d9f073d952a37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -21,11 +21,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; @@ -36,7 +31,12 @@ 
import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 370de6bb2ce8e..2a4f07a1aa319 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.esql.core.index.IndexResolution; import org.elasticsearch.xpack.esql.core.index.MappingException; import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; @@ -46,6 +45,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Keep; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java index d5c4a67b01e8b..c4d890a818ec7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java @@ -7,18 +7,18 @@ package org.elasticsearch.xpack.esql.stats; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.Keep; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; import 
org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index b63a24556c31f..f61f581f29a13 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -57,7 +57,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; @@ -73,6 +72,7 @@ import org.elasticsearch.xpack.esql.optimizer.TestPhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index fd811a2f2e217..8c5a5a4b3ba3b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; @@ -34,6 +33,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index c78baabcd03a7..7c5dc73fb62af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -11,11 +11,11 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import 
org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 1f2ec0c236ecf..06191d42c92de 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -32,10 +32,6 @@ import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.TypesTests; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; @@ -49,7 +45,11 @@ import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java index 8dfd8eee58c24..0231dc1f4a82b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/ParsingTests.java @@ -15,12 +15,12 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.TypesTests; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 5a398ed3e4370..55691526ea428 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -24,10 +24,6 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.arithmetic.ArithmeticOperation; import org.elasticsearch.xpack.esql.core.index.EsIndex; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -50,9 +46,13 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.join.Join; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInputTests.java index 5788f218564c9..55763d9ec6e7b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInputTests.java @@ -10,10 +10,10 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.NameId; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index af6c065abbeee..2049fd5592d82 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -24,10 +24,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import 
org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -39,9 +35,13 @@ import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Row; +import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 6a9e7a4000734..ee987f7a5a48a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -42,11 +42,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; -import org.elasticsearch.xpack.esql.core.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -122,11 +117,16 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index 
b550f6e6090da..ee1b2a9c7dc56 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -29,10 +29,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull; -import org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -52,8 +48,13 @@ import org.elasticsearch.xpack.esql.optimizer.rules.CombineDisjunctionsToIn; import org.elasticsearch.xpack.esql.optimizer.rules.ConstantFolding; import org.elasticsearch.xpack.esql.optimizer.rules.LiteralsOnTheRight; +import org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules; +import org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules.FoldNull; +import org.elasticsearch.xpack.esql.optimizer.rules.OptimizerRules.PropagateNullable; import org.elasticsearch.xpack.esql.optimizer.rules.PropagateEquals; import org.elasticsearch.xpack.esql.optimizer.rules.ReplaceRegexMatch; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.List; @@ -603,8 +604,7 @@ public void testGenericNullableExpression() { } public void testNullFoldingDoesNotApplyOnLogicalExpressions() { - org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull rule = - new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.FoldNull(); + OptimizerRules.FoldNull rule = new OptimizerRules.FoldNull(); Or or = new Or(EMPTY, NULL, TRUE); assertEquals(or, rule.rule(or)); @@ -626,7 +626,7 @@ public void testIsNullAndNotNull() { FieldAttribute fa = getFieldAttribute(); And and = new And(EMPTY, new IsNull(EMPTY, fa), new IsNotNull(EMPTY, fa)); - assertEquals(FALSE, new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable().rule(and)); + assertEquals(FALSE, new OptimizerRules.PropagateNullable().rule(and)); } // a IS NULL AND b IS NOT NULL AND c IS NULL AND d IS NOT NULL AND e IS NULL AND a IS NOT NULL => false @@ -639,7 +639,7 @@ public void testIsNullAndNotNullMultiField() { And and = new And(EMPTY, andOne, new And(EMPTY, andThree, andTwo)); - assertEquals(FALSE, new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.PropagateNullable().rule(and)); + assertEquals(FALSE, new OptimizerRules.PropagateNullable().rule(and)); } // a IS NULL AND a > 1 => a IS NULL AND false @@ -818,8 +818,7 @@ public void testLiteralsOnTheRight() { } public void testBoolSimplifyOr() { - org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification simplification = - new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification(); + OptimizerRules.BooleanSimplification simplification = new OptimizerRules.BooleanSimplification(); assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, TRUE))); assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, DUMMY_EXPRESSION))); @@ -831,8 +830,7 @@ public void 
testBoolSimplifyOr() { } public void testBoolSimplifyAnd() { - org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification simplification = - new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification(); + OptimizerRules.BooleanSimplification simplification = new OptimizerRules.BooleanSimplification(); assertEquals(TRUE, simplification.rule(new And(EMPTY, TRUE, TRUE))); assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, TRUE, DUMMY_EXPRESSION))); @@ -844,8 +842,7 @@ public void testBoolSimplifyAnd() { } public void testBoolCommonFactorExtraction() { - org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification simplification = - new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRules.BooleanSimplification(); + OptimizerRules.BooleanSimplification simplification = new OptimizerRules.BooleanSimplification(); Expression a1 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); Expression a2 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index a418670e98eac..a99ce5d873b44 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -48,9 +48,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; @@ -79,6 +76,9 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.join.Join; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java index 545f3efe8ca79..d575ba1fcb55a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; 
import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.math.BigInteger; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index ac89298ffcfbb..80a2d49d0d94a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -16,8 +16,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; @@ -31,6 +29,8 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.plan.logical.Drop; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Rename; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 2e2ca4feafa41..eee40b25176ab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -21,10 +21,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.plan.TableIdentifier; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; @@ -41,10 +37,14 @@ import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Explain; +import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.InlineStats; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.Lookup; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import 
org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Row; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java index a62a515ee551b..a254207865ad5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/QueryPlanTests.java @@ -13,11 +13,11 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; -import org.elasticsearch.xpack.esql.core.plan.logical.Filter; -import org.elasticsearch.xpack.esql.core.plan.logical.Limit; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Project; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java index 7454b25377594..06c6b5de3cdea 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.core.index.EsIndex; import org.elasticsearch.xpack.esql.core.index.IndexResolution; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; @@ -29,6 +28,7 @@ import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer; import org.elasticsearch.xpack.esql.parser.EsqlParser; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.Mapper; import org.elasticsearch.xpack.esql.session.EsqlConfigurationSerializationTests; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 50fe272caa076..fa20cfdec0ca0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.Like; import 
org.elasticsearch.xpack.esql.core.expression.predicate.regex.LikePattern; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.core.tree.AbstractNodeTestCase; -import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -44,6 +43,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; From 7089d806f3528074b4f6e456462a647e3fade4aa Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 5 Jul 2024 10:29:23 -0400 Subject: [PATCH 202/216] ESQL: Remove unused code from esql-core (#110440) This removes a few unused classes from esql-core. We got them as part of our clone of the shared ql code. --- .../esql/core/async/QlStatusResponse.java | 200 ------------------ ...stractTransportQlAsyncGetStatusAction.java | 111 ---------- .../core/plugin/TransportActionUtils.java | 81 ------- .../core/action/QlStatusResponseTests.java | 83 -------- 4 files changed, 475 deletions(-) delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/QlStatusResponse.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/AbstractTransportQlAsyncGetStatusAction.java delete mode 100644 x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/TransportActionUtils.java delete mode 100644 x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/action/QlStatusResponseTests.java diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/QlStatusResponse.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/QlStatusResponse.java deleted file mode 100644 index 8c28f08e8d882..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/async/QlStatusResponse.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ -package org.elasticsearch.xpack.esql.core.async; - -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.async.StoredAsyncResponse; -import org.elasticsearch.xpack.core.search.action.SearchStatusResponse; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response for *QL search status request - */ -public class QlStatusResponse extends ActionResponse implements SearchStatusResponse, ToXContentObject { - private final String id; - private final boolean isRunning; - private final boolean isPartial; - private final Long startTimeMillis; - private final long expirationTimeMillis; - private final RestStatus completionStatus; - - public interface AsyncStatus { - String id(); - - boolean isRunning(); - - boolean isPartial(); - } - - public QlStatusResponse( - String id, - boolean isRunning, - boolean isPartial, - Long startTimeMillis, - long expirationTimeMillis, - RestStatus completionStatus - ) { - this.id = id; - this.isRunning = isRunning; - this.isPartial = isPartial; - this.startTimeMillis = startTimeMillis; - this.expirationTimeMillis = expirationTimeMillis; - this.completionStatus = completionStatus; - } - - /** - * Get status from the stored Ql search response - * @param storedResponse - a response from a stored search - * @param expirationTimeMillis – expiration time in milliseconds - * @param id – encoded async search id - * @return a status response - */ - public static QlStatusResponse getStatusFromStoredSearch( - StoredAsyncResponse storedResponse, - long expirationTimeMillis, - String id - ) { - S searchResponse = storedResponse.getResponse(); - if (searchResponse != null) { - assert searchResponse.isRunning() == false : "Stored Ql search response must have a completed status!"; - return new QlStatusResponse( - searchResponse.id(), - false, - searchResponse.isPartial(), - null, // we don't store in the index the start time for completed response - expirationTimeMillis, - RestStatus.OK - ); - } else { - Exception exc = storedResponse.getException(); - assert exc != null : "Stored Ql response must either have a search response or an exception!"; - return new QlStatusResponse( - id, - false, - false, - null, // we don't store in the index the start time for completed response - expirationTimeMillis, - ExceptionsHelper.status(exc) - ); - } - } - - public QlStatusResponse(StreamInput in) throws IOException { - this.id = in.readString(); - this.isRunning = in.readBoolean(); - this.isPartial = in.readBoolean(); - this.startTimeMillis = in.readOptionalLong(); - this.expirationTimeMillis = in.readLong(); - this.completionStatus = (this.isRunning == false) ? 
RestStatus.readFrom(in) : null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - out.writeBoolean(isRunning); - out.writeBoolean(isPartial); - out.writeOptionalLong(startTimeMillis); - out.writeLong(expirationTimeMillis); - if (isRunning == false) { - RestStatus.writeTo(out, completionStatus); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - { - builder.field("id", id); - builder.field("is_running", isRunning); - builder.field("is_partial", isPartial); - if (startTimeMillis != null) { // start time is available only for a running eql search - builder.timeField("start_time_in_millis", "start_time", startTimeMillis); - } - builder.timeField("expiration_time_in_millis", "expiration_time", expirationTimeMillis); - if (isRunning == false) { // completion status is available only for a completed eql search - builder.field("completion_status", completionStatus.getStatus()); - } - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null || getClass() != obj.getClass()) return false; - QlStatusResponse other = (QlStatusResponse) obj; - return id.equals(other.id) - && isRunning == other.isRunning - && isPartial == other.isPartial - && Objects.equals(startTimeMillis, other.startTimeMillis) - && expirationTimeMillis == other.expirationTimeMillis - && Objects.equals(completionStatus, other.completionStatus); - } - - @Override - public int hashCode() { - return Objects.hash(id, isRunning, isPartial, startTimeMillis, expirationTimeMillis, completionStatus); - } - - /** - * Returns the id of the eql search status request. - */ - public String getId() { - return id; - } - - /** - * Returns {@code true} if the eql search is still running in the cluster, - * or {@code false} if the search has been completed. - */ - public boolean isRunning() { - return isRunning; - } - - /** - * Returns {@code true} if the eql search results are partial. - * This could be either because eql search hasn't finished yet, - * or if it finished and some shards have failed or timed out. - */ - public boolean isPartial() { - return isPartial; - } - - /** - * Returns a timestamp when the eql search task started, in milliseconds since epoch. - * For a completed eql search returns {@code null}, as we don't store start time for completed searches. - */ - public Long getStartTime() { - return startTimeMillis; - } - - /** - * Returns a timestamp when the eql search will be expired, in milliseconds since epoch. - */ - @Override - public long getExpirationTime() { - return expirationTimeMillis; - } - - /** - * For a completed eql search returns the completion status. - * For a still running eql search returns {@code null}. - */ - public RestStatus getCompletionStatus() { - return completionStatus; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/AbstractTransportQlAsyncGetStatusAction.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/AbstractTransportQlAsyncGetStatusAction.java deleted file mode 100644 index cb21272758d1b..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/AbstractTransportQlAsyncGetStatusAction.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.plugin; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionListenerResponseHandler; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.core.async.AsyncExecutionId; -import org.elasticsearch.xpack.core.async.AsyncTaskIndexService; -import org.elasticsearch.xpack.core.async.GetAsyncStatusRequest; -import org.elasticsearch.xpack.core.async.StoredAsyncResponse; -import org.elasticsearch.xpack.core.async.StoredAsyncTask; -import org.elasticsearch.xpack.esql.core.async.QlStatusResponse; - -import java.util.Objects; - -import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; - -public abstract class AbstractTransportQlAsyncGetStatusAction< - Response extends ActionResponse & QlStatusResponse.AsyncStatus, - AsyncTask extends StoredAsyncTask> extends HandledTransportAction { - private final String actionName; - private final TransportService transportService; - private final ClusterService clusterService; - private final Class asyncTaskClass; - private final AsyncTaskIndexService> store; - - @SuppressWarnings("this-escape") - public AbstractTransportQlAsyncGetStatusAction( - String actionName, - TransportService transportService, - ActionFilters actionFilters, - ClusterService clusterService, - NamedWriteableRegistry registry, - Client client, - ThreadPool threadPool, - BigArrays bigArrays, - Class asyncTaskClass - ) { - super(actionName, transportService, actionFilters, GetAsyncStatusRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); - this.actionName = actionName; - this.transportService = transportService; - this.clusterService = clusterService; - this.asyncTaskClass = asyncTaskClass; - Writeable.Reader> reader = in -> new StoredAsyncResponse<>(responseReader(), in); - this.store = new AsyncTaskIndexService<>( - XPackPlugin.ASYNC_RESULTS_INDEX, - clusterService, - threadPool.getThreadContext(), - client, - ASYNC_SEARCH_ORIGIN, - reader, - registry, - bigArrays - ); - } - - @Override - protected void doExecute(Task task, GetAsyncStatusRequest request, ActionListener listener) { - AsyncExecutionId searchId = AsyncExecutionId.decode(request.getId()); - DiscoveryNode node = clusterService.state().nodes().get(searchId.getTaskId().getNodeId()); - DiscoveryNode localNode = clusterService.state().getNodes().getLocalNode(); - if (node == null || Objects.equals(node, localNode)) { - store.retrieveStatus( - request, - taskManager, - asyncTaskClass, - AbstractTransportQlAsyncGetStatusAction::getStatusResponse, - QlStatusResponse::getStatusFromStoredSearch, - listener - ); - } else { - 
transportService.sendRequest( - node, - actionName, - request, - new ActionListenerResponseHandler<>(listener, QlStatusResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE) - ); - } - } - - private static QlStatusResponse getStatusResponse(StoredAsyncTask asyncTask) { - return new QlStatusResponse( - asyncTask.getExecutionId().getEncoded(), - true, - true, - asyncTask.getStartTime(), - asyncTask.getExpirationTimeMillis(), - null - ); - } - - protected abstract Writeable.Reader responseReader(); -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/TransportActionUtils.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/TransportActionUtils.java deleted file mode 100644 index 4d6fc9d1d18d5..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/TransportActionUtils.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.plugin; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.VersionMismatchException; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.xpack.esql.core.util.Holder; - -import java.util.function.Consumer; - -public final class TransportActionUtils { - - /** - * Execute a *QL request and re-try it in case the first request failed with a {@code VersionMismatchException} - * - * @param clusterService The cluster service instance - * @param onFailure On-failure handler in case the request doesn't fail with a {@code VersionMismatchException} - * @param queryRunner *QL query execution code, typically a Plan Executor running the query - * @param retryRequest Re-trial logic - * @param log Log4j logger - */ - public static void executeRequestWithRetryAttempt( - ClusterService clusterService, - Consumer onFailure, - Consumer> queryRunner, - Consumer retryRequest, - Logger log - ) { - - Holder retrySecondTime = new Holder(false); - queryRunner.accept(e -> { - // the search request likely ran on nodes with different versions of ES - // we will retry on a node with an older version that should generate a backwards compatible _search request - if (e instanceof SearchPhaseExecutionException - && ((SearchPhaseExecutionException) e).getCause() instanceof VersionMismatchException) { - if (log.isDebugEnabled()) { - log.debug("Caught exception type [{}] with cause [{}].", e.getClass().getName(), e.getCause()); - } - DiscoveryNode localNode = clusterService.state().nodes().getLocalNode(); - DiscoveryNode candidateNode = null; - for (DiscoveryNode node : clusterService.state().nodes()) { - // find the first node that's older than the current node - if (node != localNode && node.getVersion().before(localNode.getVersion())) { - candidateNode = node; - break; - } - } - if (candidateNode != null) { - if (log.isDebugEnabled()) { - log.debug( - "Candidate node to resend the request to: address [{}], id [{}], name [{}], version [{}]", - candidateNode.getAddress(), - candidateNode.getId(), - candidateNode.getName(), - candidateNode.getVersion() - ); - } - // re-send the request to the older node - 
retryRequest.accept(candidateNode); - } else { - retrySecondTime.set(true); - } - } else { - onFailure.accept(e); - } - }); - if (retrySecondTime.get()) { - if (log.isDebugEnabled()) { - log.debug("No candidate node found, likely all were upgraded in the meantime. Re-trying the original request."); - } - queryRunner.accept(onFailure); - } - } -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/action/QlStatusResponseTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/action/QlStatusResponseTests.java deleted file mode 100644 index e38755b703913..0000000000000 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/action/QlStatusResponseTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.action; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.esql.core.async.QlStatusResponse; - -import java.io.IOException; -import java.util.Date; - -import static org.elasticsearch.xpack.core.async.GetAsyncResultRequestTests.randomSearchId; - -public class QlStatusResponseTests extends AbstractWireSerializingTestCase { - - @Override - protected QlStatusResponse createTestInstance() { - String id = randomSearchId(); - boolean isRunning = randomBoolean(); - boolean isPartial = isRunning ? randomBoolean() : false; - long randomDate = (new Date(randomLongBetween(0, 3000000000000L))).getTime(); - Long startTimeMillis = randomBoolean() ? null : randomDate; - long expirationTimeMillis = startTimeMillis == null ? randomDate : startTimeMillis + 3600000L; - RestStatus completionStatus = isRunning ? null : randomBoolean() ? RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE; - return new QlStatusResponse(id, isRunning, isPartial, startTimeMillis, expirationTimeMillis, completionStatus); - } - - @Override - protected Writeable.Reader instanceReader() { - return QlStatusResponse::new; - } - - @Override - protected QlStatusResponse mutateInstance(QlStatusResponse instance) { - // return a response with the opposite running status - boolean isRunning = instance.isRunning() == false; - boolean isPartial = isRunning ? randomBoolean() : false; - RestStatus completionStatus = isRunning ? null : randomBoolean() ? RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE; - return new QlStatusResponse( - instance.getId(), - isRunning, - isPartial, - instance.getStartTime(), - instance.getExpirationTime(), - completionStatus - ); - } - - public void testToXContent() throws IOException { - QlStatusResponse response = createTestInstance(); - try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { - Object[] args = new Object[] { - response.getId(), - response.isRunning(), - response.isPartial(), - response.getStartTime() != null ? 
"\"start_time_in_millis\" : " + response.getStartTime() + "," : "", - response.getExpirationTime(), - response.getCompletionStatus() != null ? ", \"completion_status\" : " + response.getCompletionStatus().getStatus() : "" }; - String expectedJson = Strings.format(""" - { - "id" : "%s", - "is_running" : %s, - "is_partial" : %s, - %s - "expiration_time_in_millis" : %s - %s - } - """, args); - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals(XContentHelper.stripWhitespace(expectedJson), Strings.toString(builder)); - } - } -} From b7d9ccbeb4df657b7db64bc6de0fba05e9da7748 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 5 Jul 2024 10:41:05 -0400 Subject: [PATCH 203/216] ESQL: Fix compilation Two PRs passing in the night, breaking each other. This passes precommit locally but I haven't double checked tests. But at least Elasticsearch compiles again. --- .../main/java/org/elasticsearch/xpack/esql/session/Result.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java index 5abaa78f54196..42beb88bbe38b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/Result.java @@ -11,7 +11,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.List; From 6abef3a2f0d4acf8df315d6676402cd4fb6a7238 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 5 Jul 2024 08:01:52 -0700 Subject: [PATCH 204/216] Fix node tests for ToPartial (#110448) This change makes the three-parameter constructor of ToPartial public so that EsqlNodeSubclassTests can pick it up properly. 
Closes #110310 --- muted-tests.yml | 6 ------ .../esql/expression/function/aggregate/ToPartial.java | 7 +------ .../esql/optimizer/rules/TranslateMetricsAggregate.java | 2 +- 3 files changed, 2 insertions(+), 13 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 71e7d050c0e19..990b7d5dc5130 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -88,12 +88,6 @@ tests: - class: org.elasticsearch.backwards.SearchWithMinCompatibleSearchNodeIT method: testMinVersionAsOldVersion issue: https://github.com/elastic/elasticsearch/issues/109454 -- class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests - method: testReplaceChildren {class org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial} - issue: https://github.com/elastic/elasticsearch/issues/110310 -- class: org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests - method: testInfoParameters {class org.elasticsearch.xpack.esql.expression.function.aggregate.ToPartial} - issue: https://github.com/elastic/elasticsearch/issues/110310 - class: org.elasticsearch.search.vectors.ExactKnnQueryBuilderTests method: testToQuery issue: https://github.com/elastic/elasticsearch/issues/110357 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java index f94c8e0508cd7..c1da400185944 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ToPartial.java @@ -65,12 +65,7 @@ public class ToPartial extends AggregateFunction implements ToAggregator { private final Expression function; - public ToPartial(Source source, AggregateFunction function) { - super(source, function.field(), List.of(function)); - this.function = function; - } - - private ToPartial(Source source, Expression field, Expression function) { + public ToPartial(Source source, Expression field, Expression function) { super(source, field, List.of(function)); this.function = function; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java index 17b38044c1656..64555184be12d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/TranslateMetricsAggregate.java @@ -149,7 +149,7 @@ LogicalPlan translate(Aggregate metrics) { if (changed.get()) { secondPassAggs.add(new Alias(alias.source(), alias.name(), null, outerAgg, agg.id())); } else { - var toPartial = new Alias(agg.source(), alias.name(), new ToPartial(agg.source(), af)); + var toPartial = new Alias(agg.source(), alias.name(), new ToPartial(agg.source(), af.field(), af)); var fromPartial = new FromPartial(agg.source(), toPartial.toAttribute(), af); firstPassAggs.add(toPartial); secondPassAggs.add(new Alias(alias.source(), alias.name(), null, fromPartial, alias.id())); From b78efa0babd5c347f3943ecd6025d5fea6318004 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 6 Jul 2024 01:37:55 +1000 Subject: [PATCH 205/216] Mute 
org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentNodeServiceTests testLoadQueuedModelsWhenOneFails #110536 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 990b7d5dc5130..099a48cd34c58 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -100,6 +100,9 @@ tests: - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT method: test {p0=search.vectors/41_knn_search_half_byte_quantized/Test create, merge, and search cosine} issue: https://github.com/elastic/elasticsearch/issues/109978 +- class: org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentNodeServiceTests + method: testLoadQueuedModelsWhenOneFails + issue: https://github.com/elastic/elasticsearch/issues/110536 # Examples: # From 1fafdb1da0034d3e69163f13f141a7bf99ca923f Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Jul 2024 19:00:25 +0200 Subject: [PATCH 206/216] Cleanup some outdated BwC in DocWriteRequests (#110386) It's in the title, lots of the BwC is outdated + cleaning up one instance of duplication in the writing of update requests. --- .../action/index/IndexRequest.java | 42 ++++--------------- .../action/update/UpdateRequest.java | 38 ++++++----------- .../action/index/IndexRequestTests.java | 16 ------- 3 files changed, 20 insertions(+), 76 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 794a3f38b56bb..efe43fdff4efd 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRouting; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -165,10 +164,8 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio version = in.readLong(); versionType = VersionType.fromValue(in.readByte()); pipeline = readPipelineName(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - finalPipeline = readPipelineName(in); - isPipelineResolved = in.readBoolean(); - } + finalPipeline = readPipelineName(in); + isPipelineResolved = in.readBoolean(); isRetry = in.readBoolean(); autoGeneratedTimestamp = in.readLong(); if (in.readBoolean()) { @@ -179,14 +176,8 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio } ifSeqNo = in.readZLong(); ifPrimaryTerm = in.readVLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - requireAlias = in.readBoolean(); - } else { - requireAlias = false; - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - dynamicTemplates = in.readMap(StreamInput::readString); - } + requireAlias = in.readBoolean(); + dynamicTemplates = in.readMap(StreamInput::readString); if (in.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) && in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readBoolean(); @@ -737,12 +728,8 @@ private void writeBody(StreamOutput out) throws IOException { out.writeLong(version); out.writeByte(versionType.getValue()); out.writeOptionalString(pipeline); - if
(out.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - out.writeOptionalString(finalPipeline); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - out.writeBoolean(isPipelineResolved); - } + out.writeOptionalString(finalPipeline); + out.writeBoolean(isPipelineResolved); out.writeBoolean(isRetry); out.writeLong(autoGeneratedTimestamp); if (contentType != null) { @@ -753,21 +740,8 @@ private void writeBody(StreamOutput out) throws IOException { } out.writeZLong(ifSeqNo); out.writeVLong(ifPrimaryTerm); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeBoolean(requireAlias); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeMap(dynamicTemplates, StreamOutput::writeString); - } else { - if (dynamicTemplates.isEmpty() == false) { - throw new IllegalArgumentException( - Strings.format( - "[dynamic_templates] parameter requires all nodes on %s or later", - TransportVersions.V_7_13_0.toReleaseVersion() - ) - ); - } - } + out.writeBoolean(requireAlias); + out.writeMap(dynamicTemplates, StreamOutput::writeString); if (out.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) && out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeBoolean(normalisedBytesParsed != -1L); diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 2cd5258bf4376..211daf2369d99 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -157,11 +157,7 @@ public UpdateRequest(@Nullable ShardId shardId, StreamInput in) throws IOExcepti ifPrimaryTerm = in.readVLong(); detectNoop = in.readBoolean(); scriptedUpsert = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - requireAlias = in.readBoolean(); - } else { - requireAlias = false; - } + requireAlias = in.readBoolean(); } public UpdateRequest(String index, String id) { @@ -728,20 +724,18 @@ private void doWrite(StreamOutput out, boolean thin) throws IOException { } out.writeVInt(retryOnConflict); refreshPolicy.writeTo(out); - if (doc == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - // make sure the basics are set - doc.index(index); - doc.id(id); - if (thin) { - doc.writeThin(out); - } else { - doc.writeTo(out); - } - } + writeIndexRequest(out, thin, doc); out.writeOptionalWriteable(fetchSourceContext); + writeIndexRequest(out, thin, upsertRequest); + out.writeBoolean(docAsUpsert); + out.writeZLong(ifSeqNo); + out.writeVLong(ifPrimaryTerm); + out.writeBoolean(detectNoop); + out.writeBoolean(scriptedUpsert); + out.writeBoolean(requireAlias); + } + + private void writeIndexRequest(StreamOutput out, boolean thin, IndexRequest upsertRequest) throws IOException { if (upsertRequest == null) { out.writeBoolean(false); } else { @@ -755,14 +749,6 @@ private void doWrite(StreamOutput out, boolean thin) throws IOException { upsertRequest.writeTo(out); } } - out.writeBoolean(docAsUpsert); - out.writeZLong(ifSeqNo); - out.writeVLong(ifPrimaryTerm); - out.writeBoolean(detectNoop); - out.writeBoolean(scriptedUpsert); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeBoolean(requireAlias); - } } @Override diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java 
b/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java index 6106dbf1fbc5a..c05cb054ce391 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java @@ -217,22 +217,6 @@ public void testSerializeDynamicTemplates() throws Exception { IndexRequest serialized = new IndexRequest(in); assertThat(serialized.getDynamicTemplates(), anEmptyMap()); } - // old version - { - Map dynamicTemplates = IntStream.range(0, randomIntBetween(1, 10)) - .boxed() - .collect(Collectors.toMap(n -> "field-" + n, n -> "name-" + n)); - indexRequest.setDynamicTemplates(dynamicTemplates); - TransportVersion ver = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_7_13_0) - ); - BytesStreamOutput out = new BytesStreamOutput(); - out.setTransportVersion(ver); - IllegalArgumentException error = expectThrows(IllegalArgumentException.class, () -> indexRequest.writeTo(out)); - assertThat(error.getMessage(), equalTo("[dynamic_templates] parameter requires all nodes on 7.13.0 or later")); - } // new version { Map dynamicTemplates = IntStream.range(0, randomIntBetween(0, 10)) From 52e591d6a61c5093535e683bdec1880ef671584f Mon Sep 17 00:00:00 2001 From: "Mark J. Hoy" Date: Fri, 5 Jul 2024 13:01:29 -0400 Subject: [PATCH 207/216] [Inference API] Add Amazon Bedrock support to Inference API (#110248) * Initial commit; setup Gradle; start service * initial commit * minor cleanups, builds green; needs tests * bug fixes; tested working embeddings & completion * use custom json builder for embeddings request * Ensure auto-close; fix forbidden API * start of adding unit tests; abstraction layers * adding additional tests; cleanups * add requests unit tests * all tests created * fix cohere embeddings response * fix cohere embeddings response * fix lint * better test coverage for secrets; inference client * update thread-safe syncs; make dims/tokens + int * add tests for dims and max tokens positive integer * use requireNonNull;override settings type;cleanups * use r/w lock for client cache * remove client reference counting * update locking in cache; client errors; noop doc * remove extra block in internalGetOrCreateClient * remove duplicate dependencies; cleanup * add fxn to get default embeddings similarity * use async calls to Amazon Bedrock; cleanups * use Clock in cache; simplify locking; cleanups * cleanups around executor; remove some instanceof * cleanups; use EmbeddingRequestChunker * move max chunk size to constants * oof - swapped transport vers w/ master node req * use XContent instead of Jackson JsonFactory * remove gradle versions; do not allow dimensions --- gradle/verification-metadata.xml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + x-pack/plugin/inference/build.gradle | 29 +- .../licenses/aws-java-sdk-LICENSE.txt | 63 + .../licenses/aws-java-sdk-NOTICE.txt | 15 + .../inference/licenses/jaxb-LICENSE.txt | 274 ++++ .../plugin/inference/licenses/jaxb-NOTICE.txt | 1 + .../inference/licenses/joda-time-LICENSE.txt | 202 +++ .../inference/licenses/joda-time-NOTICE.txt | 5 + .../inference/src/main/java/module-info.java | 5 + .../InferenceNamedWriteablesProvider.java | 40 + .../xpack/inference/InferencePlugin.java | 7 + .../AmazonBedrockActionCreator.java | 56 + .../AmazonBedrockActionVisitor.java | 20 + .../AmazonBedrockChatCompletionAction.java | 47 + .../AmazonBedrockEmbeddingsAction.java | 
48 + .../AmazonBedrockBaseClient.java | 37 + .../AmazonBedrockChatCompletionExecutor.java | 43 + .../amazonbedrock/AmazonBedrockClient.java | 29 + .../AmazonBedrockClientCache.java | 19 + .../AmazonBedrockEmbeddingsExecutor.java | 44 + ...AmazonBedrockExecuteOnlyRequestSender.java | 124 ++ .../amazonbedrock/AmazonBedrockExecutor.java | 68 + .../AmazonBedrockInferenceClient.java | 166 +++ .../AmazonBedrockInferenceClientCache.java | 137 ++ .../AmazonBedrockRequestSender.java | 126 ++ ...onBedrockChatCompletionRequestManager.java | 65 + ...AmazonBedrockEmbeddingsRequestManager.java | 74 ++ .../AmazonBedrockRequestExecutorService.java | 42 + .../sender/AmazonBedrockRequestManager.java | 54 + .../AmazonBedrockJsonBuilder.java | 30 + .../AmazonBedrockJsonWriter.java | 20 + .../amazonbedrock/AmazonBedrockRequest.java | 85 ++ .../amazonbedrock/NoOpHttpRequest.java | 20 + ...edrockAI21LabsCompletionRequestEntity.java | 63 + ...drockAnthropicCompletionRequestEntity.java | 70 + ...zonBedrockChatCompletionEntityFactory.java | 78 ++ .../AmazonBedrockChatCompletionRequest.java | 69 + ...nBedrockCohereCompletionRequestEntity.java | 70 + .../AmazonBedrockConverseRequestEntity.java | 18 + .../AmazonBedrockConverseUtils.java | 29 + ...zonBedrockMetaCompletionRequestEntity.java | 63 + ...BedrockMistralCompletionRequestEntity.java | 70 + ...onBedrockTitanCompletionRequestEntity.java | 63 + ...nBedrockCohereEmbeddingsRequestEntity.java | 35 + .../AmazonBedrockEmbeddingsEntityFactory.java | 45 + .../AmazonBedrockEmbeddingsRequest.java | 99 ++ ...onBedrockTitanEmbeddingsRequestEntity.java | 31 + .../amazonbedrock/AmazonBedrockResponse.java | 15 + .../AmazonBedrockResponseHandler.java | 23 + .../AmazonBedrockResponseListener.java | 30 + .../AmazonBedrockChatCompletionResponse.java | 49 + ...nBedrockChatCompletionResponseHandler.java | 39 + ...BedrockChatCompletionResponseListener.java | 40 + .../AmazonBedrockEmbeddingsResponse.java | 132 ++ ...mazonBedrockEmbeddingsResponseHandler.java | 37 + ...azonBedrockEmbeddingsResponseListener.java | 38 + .../amazonbedrock/AmazonBedrockConstants.java | 27 + .../amazonbedrock/AmazonBedrockModel.java | 88 ++ .../amazonbedrock/AmazonBedrockProvider.java | 30 + .../AmazonBedrockProviderCapabilities.java | 102 ++ .../AmazonBedrockSecretSettings.java | 110 ++ .../amazonbedrock/AmazonBedrockService.java | 350 +++++ .../AmazonBedrockServiceSettings.java | 141 ++ .../AmazonBedrockChatCompletionModel.java | 83 ++ ...rockChatCompletionRequestTaskSettings.java | 90 ++ ...nBedrockChatCompletionServiceSettings.java | 93 ++ ...azonBedrockChatCompletionTaskSettings.java | 190 +++ .../AmazonBedrockEmbeddingsModel.java | 85 ++ ...mazonBedrockEmbeddingsServiceSettings.java | 220 ++++ .../plugin-metadata/plugin-security.policy | 8 +- .../AmazonBedrockActionCreatorTests.java | 175 +++ .../AmazonBedrockExecutorTests.java | 172 +++ ...mazonBedrockInferenceClientCacheTests.java | 108 ++ .../AmazonBedrockMockClientCache.java | 62 + ...AmazonBedrockMockExecuteRequestSender.java | 80 ++ .../AmazonBedrockMockInferenceClient.java | 133 ++ .../AmazonBedrockMockRequestSender.java | 91 ++ .../AmazonBedrockRequestSenderTests.java | 127 ++ ...kAI21LabsCompletionRequestEntityTests.java | 70 + ...AnthropicCompletionRequestEntityTests.java | 82 ++ ...ockCohereCompletionRequestEntityTests.java | 82 ++ .../AmazonBedrockConverseRequestUtils.java | 94 ++ ...drockMetaCompletionRequestEntityTests.java | 70 + ...ckMistralCompletionRequestEntityTests.java | 82 ++ ...rockTitanCompletionRequestEntityTests.java | 70 + 
 ...ockCohereEmbeddingsRequestEntityTests.java |   25 +
 ...rockTitanEmbeddingsRequestEntityTests.java |   24 +
 .../AmazonBedrockSecretSettingsTests.java     |  120 ++
 .../AmazonBedrockServiceTests.java            | 1131 +++++++++++++++++
 ...AmazonBedrockChatCompletionModelTests.java |  221 ++++
 ...hatCompletionRequestTaskSettingsTests.java |  107 ++
 ...ockChatCompletionServiceSettingsTests.java |  131 ++
 ...edrockChatCompletionTaskSettingsTests.java |  226 ++++
 .../AmazonBedrockEmbeddingsModelTests.java    |   81 ++
 ...BedrockEmbeddingsServiceSettingsTests.java |  404 ++++++
 96 files changed, 8790 insertions(+), 2 deletions(-)
 create mode 100644 x-pack/plugin/inference/licenses/aws-java-sdk-LICENSE.txt
 create mode 100644 x-pack/plugin/inference/licenses/aws-java-sdk-NOTICE.txt
 create mode 100644 x-pack/plugin/inference/licenses/jaxb-LICENSE.txt
 create mode 100644 x-pack/plugin/inference/licenses/jaxb-NOTICE.txt
 create mode 100644 x-pack/plugin/inference/licenses/joda-time-LICENSE.txt
 create mode 100644 x-pack/plugin/inference/licenses/joda-time-NOTICE.txt
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreator.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionVisitor.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockChatCompletionAction.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockEmbeddingsAction.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockBaseClient.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClientCache.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockEmbeddingsExecutor.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecuteOnlyRequestSender.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutor.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockEmbeddingsRequestManager.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestExecutorService.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestManager.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonBuilder.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonWriter.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockRequest.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/NoOpHttpRequest.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsEntityFactory.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsRequest.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntity.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponse.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseListener.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponse.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseHandler.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseListener.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponse.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseHandler.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseListener.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockConstants.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockModel.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProviderCapabilities.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModel.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettings.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettings.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java
 create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettings.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreatorTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockClientCache.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockExecuteRequestSender.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSenderTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntityTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModelTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettingsTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettingsTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java
 create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettingsTests.java

diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index cd408ba75aa10..02313c5ed82a2 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -84,6 +84,11 @@
+
+
+
+
+
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 2004c6fda8ce5..ff50d1513d28a 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -208,6 +208,7 @@ static TransportVersion def(int id) {
     public static final TransportVersion TEXT_SIMILARITY_RERANKER_RETRIEVER = def(8_699_00_0);
     public static final TransportVersion ML_INFERENCE_GOOGLE_VERTEX_AI_RERANKING_ADDED = def(8_700_00_0);
     public static final TransportVersion VERSIONED_MASTER_NODE_REQUESTS = def(8_701_00_0);
+    public static final TransportVersion ML_INFERENCE_AMAZON_BEDROCK_ADDED = def(8_702_00_0);
     /*
      * STOP! READ THIS FIRST! No, really,
diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle
index 41ca9966c1336..beeec94f21ebf 100644
--- a/x-pack/plugin/inference/build.gradle
+++ b/x-pack/plugin/inference/build.gradle
@@ -27,6 +27,10 @@ base {
   archivesName = 'x-pack-inference'
 }

+versions << [
+  'awsbedrockruntime': '1.12.740'
+]
+
 dependencies {
   implementation project(path: ':libs:elasticsearch-logging')
   compileOnly project(":server")
@@ -53,10 +57,19 @@ dependencies {
   implementation 'com.google.http-client:google-http-client-appengine:1.42.3'
   implementation 'com.google.http-client:google-http-client-jackson2:1.42.3'
   implementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
+  implementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}"
+  implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}"
+  implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}"
+  implementation "com.fasterxml.jackson:jackson-bom:${versions.jackson}"
   implementation 'com.google.api:gax-httpjson:0.105.1'
   implementation 'io.grpc:grpc-context:1.49.2'
   implementation 'io.opencensus:opencensus-api:0.31.1'
   implementation 'io.opencensus:opencensus-contrib-http-util:0.31.1'
+  implementation "com.amazonaws:aws-java-sdk-bedrockruntime:${versions.awsbedrockruntime}"
+  implementation "com.amazonaws:aws-java-sdk-core:${versions.aws}"
+  implementation "com.amazonaws:jmespath-java:${versions.aws}"
+  implementation "joda-time:joda-time:2.10.10"
+  implementation 'javax.xml.bind:jaxb-api:2.2.2'
 }

 tasks.named("dependencyLicenses").configure {
@@ -66,6 +79,9 @@ tasks.named("dependencyLicenses").configure {
   mapping from: /protobuf.*/, to: 'protobuf'
   mapping from: /proto-google.*/, to: 'proto-google'
   mapping from: /jackson.*/, to: 'jackson'
+  mapping from: /aws-java-sdk-.*/, to: 'aws-java-sdk'
+  mapping from: /jmespath-java.*/, to: 'aws-java-sdk'
+  mapping from: /jaxb-.*/, to: 'jaxb'
 }

 tasks.named("thirdPartyAudit").configure {
@@ -199,10 +215,21 @@ tasks.named("thirdPartyAudit").configure {
     'com.google.appengine.api.urlfetch.HTTPRequest',
     'com.google.appengine.api.urlfetch.HTTPResponse',
     'com.google.appengine.api.urlfetch.URLFetchService',
-    'com.google.appengine.api.urlfetch.URLFetchServiceFactory'
+    'com.google.appengine.api.urlfetch.URLFetchServiceFactory',
+    'software.amazon.ion.IonReader',
+    'software.amazon.ion.IonSystem',
+    'software.amazon.ion.IonType',
+    'software.amazon.ion.IonWriter',
+    'software.amazon.ion.Timestamp',
+    'software.amazon.ion.system.IonBinaryWriterBuilder',
+    'software.amazon.ion.system.IonSystemBuilder',
+    'software.amazon.ion.system.IonTextWriterBuilder',
+    'software.amazon.ion.system.IonWriterBuilder',
+    'javax.activation.DataHandler'
   )
 }

 tasks.named('yamlRestTest') {
   usesDefaultDistribution()
 }
+
diff --git a/x-pack/plugin/inference/licenses/aws-java-sdk-LICENSE.txt b/x-pack/plugin/inference/licenses/aws-java-sdk-LICENSE.txt
new file mode 100644
index 0000000000000..98d1f9319f374
--- /dev/null
+++ b/x-pack/plugin/inference/licenses/aws-java-sdk-LICENSE.txt
@@ -0,0 +1,63 @@
+Apache License
+Version 2.0, January 2004
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner.
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and + 2. You must cause any modified files to carry prominent notices stating that You changed the files; and + 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +Note: Other license terms may apply to certain, identified software files contained within or distributed with the accompanying software if such terms are included in the directory containing the accompanying software. 
Such other license terms will then apply in lieu of the terms of the software license above.
+
+JSON processing code subject to the JSON License from JSON.org:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+The Software shall be used for Good, not Evil.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/x-pack/plugin/inference/licenses/aws-java-sdk-NOTICE.txt b/x-pack/plugin/inference/licenses/aws-java-sdk-NOTICE.txt
new file mode 100644
index 0000000000000..565bd6085c71a
--- /dev/null
+++ b/x-pack/plugin/inference/licenses/aws-java-sdk-NOTICE.txt
@@ -0,0 +1,15 @@
+AWS SDK for Java
+Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+This product includes software developed by
+Amazon Technologies, Inc (http://www.amazon.com/).
+
+**********************
+THIRD PARTY COMPONENTS
+**********************
+This software includes third party software subject to the following copyrights:
+- XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty.
+- JSON parsing and utility functions from JSON.org - Copyright 2002 JSON.org.
+- PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc.
+
+The licenses for these third party components are included in LICENSE.txt
diff --git a/x-pack/plugin/inference/licenses/jaxb-LICENSE.txt b/x-pack/plugin/inference/licenses/jaxb-LICENSE.txt
new file mode 100644
index 0000000000000..833a843cfeee1
--- /dev/null
+++ b/x-pack/plugin/inference/licenses/jaxb-LICENSE.txt
@@ -0,0 +1,274 @@
+COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL)Version 1.1
+
+1. Definitions.
+
+ 1.1. "Contributor" means each individual or entity that creates or contributes to the creation of Modifications.
+
+ 1.2. "Contributor Version" means the combination of the Original Software, prior Modifications used by a Contributor (if any), and the Modifications made by that particular Contributor.
+
+ 1.3. "Covered Software" means (a) the Original Software, or (b) Modifications, or (c) the combination of files containing Original Software with files containing Modifications, in each case including portions thereof.
+
+ 1.4. "Executable" means the Covered Software in any form other than Source Code.
+
+ 1.5. "Initial Developer" means the individual or entity that first makes Original Software available under this License.
+
+ 1.6. "Larger Work" means a work which combines Covered Software or portions thereof with code not governed by the terms of this License.
+
+ 1.7. "License" means this document.
+
+ 1.8.
"Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently acquired, any and all of the rights conveyed herein. + + 1.9. "Modifications" means the Source Code and Executable form of any of the following: + + A. Any file that results from an addition to, deletion from or modification of the contents of a file containing Original Software or previous Modifications; + + B. Any new file that contains any part of the Original Software or previous Modification; or + + C. Any new file that is contributed or otherwise made available under the terms of this License. + + 1.10. "Original Software" means the Source Code and Executable form of computer software code that is originally released under this License. + + 1.11. "Patent Claims" means any patent claim(s), now owned or hereafter acquired, including without limitation, method, process, and apparatus claims, in any patent Licensable by grantor. + + 1.12. "Source Code" means (a) the common form of computer software code in which modifications are made and (b) associated documentation included in or with such code. + + 1.13. "You" (or "Your") means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity which controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. + +2. License Grants. + + 2.1. The Initial Developer Grant. + + Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, the Initial Developer hereby grants You a world-wide, royalty-free, non-exclusive license: + + (a) under intellectual property rights (other than patent or trademark) Licensable by Initial Developer, to use, reproduce, modify, display, perform, sublicense and distribute the Original Software (or portions thereof), with or without Modifications, and/or as part of a Larger Work; and + + (b) under Patent Claims infringed by the making, using or selling of Original Software, to make, have made, use, practice, sell, and offer for sale, and/or otherwise dispose of the Original Software (or portions thereof). + + (c) The licenses granted in Sections 2.1(a) and (b) are effective on the date Initial Developer first distributes or otherwise makes the Original Software available to a third party under the terms of this License. + + (d) Notwithstanding Section 2.1(b) above, no patent license is granted: (1) for code that You delete from the Original Software, or (2) for infringements caused by: (i) the modification of the Original Software, or (ii) the combination of the Original Software with other software or devices. + + 2.2. Contributor Grant. 
+ + Conditioned upon Your compliance with Section 3.1 below and subject to third party intellectual property claims, each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: + + (a) under intellectual property rights (other than patent or trademark) Licensable by Contributor to use, reproduce, modify, display, perform, sublicense and distribute the Modifications created by such Contributor (or portions thereof), either on an unmodified basis, with other Modifications, as Covered Software and/or as part of a Larger Work; and + + (b) under Patent Claims infringed by the making, using, or selling of Modifications made by that Contributor either alone and/or in combination with its Contributor Version (or portions of such combination), to make, use, sell, offer for sale, have made, and/or otherwise dispose of: (1) Modifications made by that Contributor (or portions thereof); and (2) the combination of Modifications made by that Contributor with its Contributor Version (or portions of such combination). + + (c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective on the date Contributor first distributes or otherwise makes the Modifications available to a third party. + + (d) Notwithstanding Section 2.2(b) above, no patent license is granted: (1) for any code that Contributor has deleted from the Contributor Version; (2) for infringements caused by: (i) third party modifications of Contributor Version, or (ii) the combination of Modifications made by that Contributor with other software (except as part of the Contributor Version) or other devices; or (3) under Patent Claims infringed by Covered Software in the absence of Modifications made by that Contributor. + +3. Distribution Obligations. + + 3.1. Availability of Source Code. + + Any Covered Software that You distribute or otherwise make available in Executable form must also be made available in Source Code form and that Source Code form must be distributed only under the terms of this License. You must include a copy of this License with every copy of the Source Code form of the Covered Software You distribute or otherwise make available. You must inform recipients of any such Covered Software in Executable form as to how they can obtain such Covered Software in Source Code form in a reasonable manner on or through a medium customarily used for software exchange. + + 3.2. Modifications. + + The Modifications that You create or to which You contribute are governed by the terms of this License. You represent that You believe Your Modifications are Your original creation(s) and/or You have sufficient rights to grant the rights conveyed by this License. + + 3.3. Required Notices. + + You must include a notice in each of Your Modifications that identifies You as the Contributor of the Modification. You may not remove or alter any copyright, patent or trademark notices contained within the Covered Software, or any notices of licensing or any descriptive text giving attribution to any Contributor or the Initial Developer. + + 3.4. Application of Additional Terms. + + You may not offer or impose any terms on any Covered Software in Source Code form that alters or restricts the applicable version of this License or the recipients' rights hereunder. You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, you may do so only on Your own behalf, and not on behalf of the Initial Developer or any Contributor. 
You must make it absolutely clear that any such warranty, support, indemnity or liability obligation is offered by You alone, and You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of warranty, support, indemnity or liability terms You offer. + + 3.5. Distribution of Executable Versions. + + You may distribute the Executable form of the Covered Software under the terms of this License or under the terms of a license of Your choice, which may contain terms different from this License, provided that You are in compliance with the terms of this License and that the license for the Executable form does not attempt to limit or alter the recipient's rights in the Source Code form from the rights set forth in this License. If You distribute the Covered Software in Executable form under a different license, You must make it absolutely clear that any terms which differ from this License are offered by You alone, not by the Initial Developer or Contributor. You hereby agree to indemnify the Initial Developer and every Contributor for any liability incurred by the Initial Developer or such Contributor as a result of any such terms You offer. + + 3.6. Larger Works. + + You may create a Larger Work by combining Covered Software with other code not governed by the terms of this License and distribute the Larger Work as a single product. In such a case, You must make sure the requirements of this License are fulfilled for the Covered Software. + +4. Versions of the License. + + 4.1. New Versions. + + Oracle is the initial license steward and may publish revised and/or new versions of this License from time to time. Each version will be given a distinguishing version number. Except as provided in Section 4.3, no one other than the license steward has the right to modify this License. + + 4.2. Effect of New Versions. + + You may always continue to use, distribute or otherwise make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. If the Initial Developer includes a notice in the Original Software prohibiting it from being distributed or otherwise made available under any subsequent version of the License, You must distribute and make the Covered Software available under the terms of the version of the License under which You originally received the Covered Software. Otherwise, You may also choose to use, distribute or otherwise make the Covered Software available under the terms of any subsequent version of the License published by the license steward. + + 4.3. Modified Versions. + + When You are an Initial Developer and You want to create a new license for Your Original Software, You may create and use a modified version of this License if You: (a) rename the license and remove any references to the name of the license steward (except to note that the license differs from this License); and (b) otherwise make it clear that the license contains terms which differ from this License. + +5. DISCLAIMER OF WARRANTY. + + COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. 
SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +6. TERMINATION. + + 6.1. This License and the rights granted hereunder will terminate automatically if You fail to comply with terms herein and fail to cure such breach within 30 days of becoming aware of the breach. Provisions which, by their nature, must remain in effect beyond the termination of this License shall survive. + + 6.2. If You assert a patent infringement claim (excluding declaratory judgment actions) against Initial Developer or a Contributor (the Initial Developer or Contributor against whom You assert such claim is referred to as "Participant") alleging that the Participant Software (meaning the Contributor Version where the Participant is a Contributor or the Original Software where the Participant is the Initial Developer) directly or indirectly infringes any patent, then any and all rights granted directly or indirectly to You by such Participant, the Initial Developer (if the Initial Developer is not the Participant) and all Contributors under Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice from Participant terminate prospectively and automatically at the expiration of such 60 day notice period, unless if within such 60 day period You withdraw Your claim with respect to the Participant Software against such Participant either unilaterally or pursuant to a written agreement with Participant. + + 6.3. If You assert a patent infringement claim against Participant alleging that the Participant Software directly or indirectly infringes any patent where such claim is resolved (such as by license or settlement) prior to the initiation of patent infringement litigation, then the reasonable value of the licenses granted by such Participant under Sections 2.1 or 2.2 shall be taken into account in determining the amount or value of any payment or license. + + 6.4. In the event of termination under Sections 6.1 or 6.2 above, all end user licenses that have been validly granted by You or any distributor hereunder prior to termination (excluding licenses granted to You by any distributor) shall survive termination. + +7. LIMITATION OF LIABILITY. + + UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU. + +8. U.S. GOVERNMENT END USERS. + + The Covered Software is a "commercial item," as that term is defined in 48 C.F.R. 2.101 (Oct. 
1995), consisting of "commercial computer software" (as that term is defined at 48 C.F.R. ? 252.227-7014(a)(1)) and "commercial computer software documentation" as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Covered Software with only those rights set forth herein. This U.S. Government Rights clause is in lieu of, and supersedes, any other FAR, DFAR, or other clause or provision that addresses Government rights in computer software under this License. + +9. MISCELLANEOUS. + + This License represents the complete agreement concerning subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by the law of the jurisdiction specified in a notice contained within the Original Software (except to the extent applicable law, if any, provides otherwise), excluding such jurisdiction's conflict-of-law provisions. Any litigation relating to this License shall be subject to the jurisdiction of the courts located in the jurisdiction and venue specified in a notice contained within the Original Software, with the losing party responsible for costs, including, without limitation, court costs and reasonable attorneys' fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not apply to this License. You agree that You alone are responsible for compliance with the United States export administration regulations (and the export control laws and regulation of any other countries) when You use, distribute or otherwise make available any Covered Software. + +10. RESPONSIBILITY FOR CLAIMS. + + As between Initial Developer and the Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License and You agree to work with Initial Developer and Contributors to distribute such responsibility on an equitable basis. Nothing herein is intended or shall be deemed to constitute any admission of liability. + +---------- +NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) +The code released under the CDDL shall be governed by the laws of the State of California (excluding conflict-of-law provisions). Any litigation relating to this License shall be subject to the jurisdiction of the Federal Courts of the Northern District of California and the state courts of the State of California, with venue lying in Santa Clara County, California. + + + + +The GNU General Public License (GPL) Version 2, June 1991 + + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. 
This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Library General Public License instead.) You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. + +To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. + +Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. + +Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. + +The precise terms and conditions for copying, distribution and modification follow. + + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. + +1. 
You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. + +You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. + +2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. + + c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. + +3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. + +If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. + +4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. + +5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. + +6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. + +7. 
If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. + +This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. + +8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. + +9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. + +10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. + +NO WARRANTY + +11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +END OF TERMS AND CONDITIONS + + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. + + One line to give the program's name and a brief idea of what it does. + + Copyright (C) + + This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. + + signature of Ty Coon, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Library General Public License instead of this License. + + +"CLASSPATH" EXCEPTION TO THE GPL VERSION 2 + +Certain source files distributed by Oracle are subject to the following clarification and special exception to the GPL Version 2, but only where Oracle has expressly included in the particular source file's header the words "Oracle designates this particular file as subject to the "Classpath" exception as provided by Oracle in the License file that accompanied this code." + +Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License Version 2 cover the whole combination. + +As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. diff --git a/x-pack/plugin/inference/licenses/jaxb-NOTICE.txt b/x-pack/plugin/inference/licenses/jaxb-NOTICE.txt new file mode 100644 index 0000000000000..8d1c8b69c3fce --- /dev/null +++ b/x-pack/plugin/inference/licenses/jaxb-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/x-pack/plugin/inference/licenses/joda-time-LICENSE.txt b/x-pack/plugin/inference/licenses/joda-time-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/x-pack/plugin/inference/licenses/joda-time-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/x-pack/plugin/inference/licenses/joda-time-NOTICE.txt b/x-pack/plugin/inference/licenses/joda-time-NOTICE.txt new file mode 100644 index 0000000000000..dffbcf31cacf6 --- /dev/null +++ b/x-pack/plugin/inference/licenses/joda-time-NOTICE.txt @@ -0,0 +1,5 @@ +============================================================================= += NOTICE file corresponding to section 4d of the Apache License Version 2.0 = +============================================================================= +This product includes software developed by +Joda.org (http://www.joda.org/). diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index aa907a236884a..a7e5718a0920e 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -20,8 +20,13 @@ requires org.apache.lucene.join; requires com.ibm.icu; requires com.google.auth.oauth2; + requires com.google.auth; requires com.google.api.client; requires com.google.gson; + requires aws.java.sdk.bedrockruntime; + requires aws.java.sdk.core; + requires com.fasterxml.jackson.databind; + requires org.joda.time; exports org.elasticsearch.xpack.inference.action; exports org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index f3799b824fc0e..f8ce9df1fb194 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -24,6 +24,10 @@ import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockSecretSettings; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionServiceSettings; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionTaskSettings; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.anthropic.completion.AnthropicChatCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.anthropic.completion.AnthropicChatCompletionTaskSettings; import org.elasticsearch.xpack.inference.services.azureaistudio.completion.AzureAiStudioChatCompletionServiceSettings; @@ -122,10 +126,46 @@ public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { addMistralNamedWriteables(namedWriteables); addCustomElandWriteables(namedWriteables); addAnthropicNamedWritables(namedWriteables); + addAmazonBedrockNamedWriteables(namedWriteables); return namedWriteables; } + private static void addAmazonBedrockNamedWriteables(List<NamedWriteableRegistry.Entry> namedWriteables) { + namedWriteables.add( + new NamedWriteableRegistry.Entry( + AmazonBedrockSecretSettings.class, + AmazonBedrockSecretSettings.NAME, + AmazonBedrockSecretSettings::new + ) + ); + + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + AmazonBedrockEmbeddingsServiceSettings.NAME, +
AmazonBedrockEmbeddingsServiceSettings::new + ) + ); + + // no task settings for Amazon Bedrock Embeddings + + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + AmazonBedrockChatCompletionServiceSettings.NAME, + AmazonBedrockChatCompletionServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + TaskSettings.class, + AmazonBedrockChatCompletionTaskSettings.NAME, + AmazonBedrockChatCompletionTaskSettings::new + ) + ); + } + private static void addMistralNamedWriteables(List<NamedWriteableRegistry.Entry> namedWriteables) { namedWriteables.add( new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 1db5b4135ee94..1c388f7399260 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; import org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter; import org.elasticsearch.xpack.inference.common.Truncator; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; @@ -70,6 +71,7 @@ import org.elasticsearch.xpack.inference.rest.RestInferenceAction; import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockService; import org.elasticsearch.xpack.inference.services.anthropic.AnthropicService; import org.elasticsearch.xpack.inference.services.azureaistudio.AzureAiStudioService; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiService; @@ -117,6 +119,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final Settings settings; private final SetOnce<HttpRequestSender.Factory> httpFactory = new SetOnce<>(); + private final SetOnce<AmazonBedrockRequestSender.Factory> amazonBedrockFactory = new SetOnce<>(); private final SetOnce<ServiceComponents> serviceComponents = new SetOnce<>(); private final SetOnce<InferenceServiceRegistry> inferenceServiceRegistry = new SetOnce<>(); @@ -170,6 +173,9 @@ public Collection<?> createComponents(PluginServices services) { var httpRequestSenderFactory = new HttpRequestSender.Factory(serviceComponents.get(), httpClientManager, services.clusterService()); httpFactory.set(httpRequestSenderFactory); + var amazonBedrockRequestSenderFactory = new AmazonBedrockRequestSender.Factory(serviceComponents.get(), services.clusterService()); + amazonBedrockFactory.set(amazonBedrockRequestSenderFactory); + ModelRegistry modelRegistry = new ModelRegistry(services.client()); if (inferenceServiceExtensions == null) { @@ -209,6 +215,7 @@ public List<InferenceServiceExtension.Factory> getInferenceServiceFactories() { context -> new GoogleVertexAiService(httpFactory.get(), serviceComponents.get()), context -> new MistralService(httpFactory.get(), serviceComponents.get()), context -> new AnthropicService(httpFactory.get(), serviceComponents.get()), + context -> new AmazonBedrockService(httpFactory.get(), amazonBedrockFactory.get(), serviceComponents.get()), ElasticsearchInternalService::new ); }
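An aside on the wiring above (not part of the diff): each entry in getInferenceServiceFactories() is a lazily invoked factory, so the Bedrock service is only constructed once the registry supplies its context and the SetOnce holders are populated. A minimal, self-contained sketch of that pattern, with Context and InferenceService as simplified stand-ins for the real Elasticsearch types:

```java
// Simplified stand-ins; Context and InferenceService are not the real Elasticsearch types.
import java.util.List;
import java.util.function.Function;

class Context {}

interface InferenceService {}

class ServiceRegistrySketch {
    private final List<Function<Context, InferenceService>> factories;

    ServiceRegistrySketch(List<Function<Context, InferenceService>> factories) {
        this.factories = factories;
    }

    // Each factory runs lazily, only once the shared context is ready.
    List<InferenceService> buildAll(Context context) {
        return factories.stream().map(factory -> factory.apply(context)).toList();
    }
}
```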
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreator.java new file mode 100644 index 0000000000000..5f9fc532e33b2 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreator.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.amazonbedrock; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockChatCompletionRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockEmbeddingsRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel; + +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; + +public class AmazonBedrockActionCreator implements AmazonBedrockActionVisitor { + private final Sender sender; + private final ServiceComponents serviceComponents; + private final TimeValue timeout; + + public AmazonBedrockActionCreator(Sender sender, ServiceComponents serviceComponents, @Nullable TimeValue timeout) { + this.sender = Objects.requireNonNull(sender); + this.serviceComponents = Objects.requireNonNull(serviceComponents); + this.timeout = timeout; + } + + @Override + public ExecutableAction create(AmazonBedrockEmbeddingsModel embeddingsModel, Map<String, Object> taskSettings) { + var overriddenModel = AmazonBedrockEmbeddingsModel.of(embeddingsModel, taskSettings); + var requestManager = new AmazonBedrockEmbeddingsRequestManager( + overriddenModel, + serviceComponents.truncator(), + serviceComponents.threadPool(), + timeout + ); + var errorMessage = constructFailedToSendRequestMessage(null, "Amazon Bedrock embeddings"); + return new AmazonBedrockEmbeddingsAction(sender, requestManager, errorMessage); + } + + @Override + public ExecutableAction create(AmazonBedrockChatCompletionModel completionModel, Map<String, Object> taskSettings) { + var overriddenModel = AmazonBedrockChatCompletionModel.of(completionModel, taskSettings); + var requestManager = new AmazonBedrockChatCompletionRequestManager(overriddenModel, serviceComponents.threadPool(), timeout); + var errorMessage = constructFailedToSendRequestMessage(null, "Amazon Bedrock completion"); + return new AmazonBedrockChatCompletionAction(sender, requestManager, errorMessage); + } +}
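The creator above is the concrete side of a visitor pattern: which create(...) overload runs is decided by the static type of the model. A hedged, self-contained sketch of that double dispatch, with every type reduced to a stand-in (the accept(...) hook is illustrative and not taken from the patch):

```java
// All types are simplified stand-ins; accept(...) is an illustrative hook, not patch code.
interface Action {}

interface ActionVisitor {
    Action create(EmbeddingsModel model);

    Action create(CompletionModel model);
}

abstract class Model {
    abstract Action accept(ActionVisitor visitor);
}

class EmbeddingsModel extends Model {
    @Override
    Action accept(ActionVisitor visitor) {
        return visitor.create(this); // static type of `this` selects the embeddings overload
    }
}

class CompletionModel extends Model {
    @Override
    Action accept(ActionVisitor visitor) {
        return visitor.create(this); // selects the completion overload
    }
}
```

The payoff of this shape is that per-model wiring (request managers, error messages, timeouts) stays inside the creator instead of leaking into the service class.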
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionVisitor.java new file mode 100644 index 0000000000000..b540d030eb3f7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionVisitor.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.amazonbedrock; + +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel; + +import java.util.Map; + +public interface AmazonBedrockActionVisitor { + ExecutableAction create(AmazonBedrockEmbeddingsModel embeddingsModel, Map<String, Object> taskSettings); + + ExecutableAction create(AmazonBedrockChatCompletionModel completionModel, Map<String, Object> taskSettings); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockChatCompletionAction.java new file mode 100644 index 0000000000000..9d3c39d3ac4d9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockChatCompletionAction.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.action.amazonbedrock; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.RequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class AmazonBedrockChatCompletionAction implements ExecutableAction { + private final Sender sender; + private final RequestManager requestManager; + private final String errorMessage; + + public AmazonBedrockChatCompletionAction(Sender sender, RequestManager requestManager, String errorMessage) { + this.sender = Objects.requireNonNull(sender); + this.requestManager = Objects.requireNonNull(requestManager); + this.errorMessage = Objects.requireNonNull(errorMessage); + } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener<InferenceServiceResults> listener) { + try { + ActionListener<InferenceServiceResults> wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestManager, inferenceInputs, timeout, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +}
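Both Bedrock actions follow the same contract: execute(...) must never throw; every failure, expected or not, is funneled to the listener. A simplified sketch of that failure-wrapping shape (Runnable and Consumer stand in for the request manager and the listener):

```java
import java.util.function.Consumer;

// Sketch of the execute(...) contract above: the listener is the only error channel.
class SafeExecuteSketch {
    static void execute(Runnable sendRequest, Consumer<Exception> onFailure) {
        try {
            sendRequest.run();
        } catch (Exception e) {
            onFailure.accept(e); // reported to the caller's listener, never rethrown
        }
    }
}
```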
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockEmbeddingsAction.java new file mode 100644 index 0000000000000..3f8be0c3cccbe --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockEmbeddingsAction.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.amazonbedrock; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.RequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class AmazonBedrockEmbeddingsAction implements ExecutableAction { + + private final Sender sender; + private final RequestManager requestManager; + private final String errorMessage; + + public AmazonBedrockEmbeddingsAction(Sender sender, RequestManager requestManager, String errorMessage) { + this.sender = Objects.requireNonNull(sender); + this.requestManager = Objects.requireNonNull(requestManager); + this.errorMessage = Objects.requireNonNull(errorMessage); + } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener<InferenceServiceResults> listener) { + try { + ActionListener<InferenceServiceResults> wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestManager, inferenceInputs, timeout, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockBaseClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockBaseClient.java new file mode 100644 index 0000000000000..f9e403582a0ec --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockBaseClient.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.time.Clock; +import java.util.Objects; + +public abstract class AmazonBedrockBaseClient implements AmazonBedrockClient { + protected final Integer modelKeysAndRegionHashcode; + protected Clock clock = Clock.systemUTC(); + + protected AmazonBedrockBaseClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + Objects.requireNonNull(model); + this.modelKeysAndRegionHashcode = getModelKeysAndRegionHashcode(model, timeout); + } + + public static Integer getModelKeysAndRegionHashcode(AmazonBedrockModel model, @Nullable TimeValue timeout) { + var secretSettings = model.getSecretSettings(); + var serviceSettings = model.getServiceSettings(); + return Objects.hash(secretSettings.accessKey, secretSettings.secretKey, serviceSettings.region(), timeout); + } + + public final void setClock(Clock clock) { + this.clock = clock; + } + + abstract void close(); +}
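The hashcode computed above doubles as the cache key for the client cache introduced later in this patch: models that share access key, secret key, region, and timeout resolve to the same SDK client. A small runnable illustration of that keying idea (plain strings stand in for the secure settings):

```java
import java.util.Objects;

// Plain strings stand in for the secure access/secret key settings.
class CacheKeySketch {
    static Integer key(String accessKey, String secretKey, String region, Long timeoutMillis) {
        return Objects.hash(accessKey, secretKey, region, timeoutMillis);
    }

    public static void main(String[] args) {
        Integer first = key("AK", "SK", "us-east-1", 10_000L);
        Integer second = key("AK", "SK", "us-east-1", 10_000L);
        System.out.println(first.equals(second)); // true -> both requests reuse one cached client
    }
}
```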
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java new file mode 100644 index 0000000000000..a4e0c399517c1 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockChatCompletionExecutor.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion.AmazonBedrockChatCompletionResponseListener; + +import java.util.function.Supplier; + +public class AmazonBedrockChatCompletionExecutor extends AmazonBedrockExecutor { + private final AmazonBedrockChatCompletionRequest chatCompletionRequest; + + protected AmazonBedrockChatCompletionExecutor( + AmazonBedrockChatCompletionRequest request, + AmazonBedrockResponseHandler responseHandler, + Logger logger, + Supplier<Boolean> hasRequestCompletedFunction, + ActionListener<InferenceServiceResults> inferenceResultsListener, + AmazonBedrockClientCache clientCache + ) { + super(request, responseHandler, logger, hasRequestCompletedFunction, inferenceResultsListener, clientCache); + this.chatCompletionRequest = request; + } + + @Override + protected void executeClientRequest(AmazonBedrockBaseClient awsBedrockClient) { + var chatCompletionResponseListener = new AmazonBedrockChatCompletionResponseListener( + chatCompletionRequest, + responseHandler, + inferenceResultsListener + ); + chatCompletionRequest.executeChatCompletionRequest(awsBedrockClient, chatCompletionResponseListener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java new file mode 100644 index 0000000000000..812e76129c420 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClient.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.ConverseResult; +import com.amazonaws.services.bedrockruntime.model.InvokeModelRequest; +import com.amazonaws.services.bedrockruntime.model.InvokeModelResult; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; + +import java.time.Instant; + +public interface AmazonBedrockClient { + void converse(ConverseRequest converseRequest, ActionListener<ConverseResult> responseListener) throws ElasticsearchException; + + void invokeModel(InvokeModelRequest invokeModelRequest, ActionListener<InvokeModelResult> responseListener) + throws ElasticsearchException; + + boolean isExpired(Instant currentTimestampMs); + + void resetExpiration(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClientCache.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClientCache.java new file mode 100644 index 0000000000000..e6bb99620b581 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockClientCache.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.io.Closeable; +import java.io.IOException; + +public interface AmazonBedrockClientCache extends Closeable { + AmazonBedrockBaseClient getOrCreateClient(AmazonBedrockModel model, @Nullable TimeValue timeout) throws IOException; +}
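AmazonBedrockClientCache extends Closeable so the sender can release every cached SDK client in one place at shutdown. A minimal sketch of the intended lifecycle, with simplified stand-in interfaces (the cache is keyed by a plain string here rather than a model):

```java
import java.io.Closeable;
import java.io.IOException;

// Simplified stand-ins for the client and cache interfaces above.
interface SketchClient {
    void send(String payload);
}

interface SketchClientCache extends Closeable {
    SketchClient getOrCreateClient(String cacheKey) throws IOException;
}

class CacheLifecycleSketch {
    static void run(SketchClientCache cache) throws IOException {
        try (cache) { // closed exactly once, releasing every cached client
            cache.getOrCreateClient("model-a").send("{}");
            cache.getOrCreateClient("model-a").send("{}"); // second call reuses the cached client
        }
    }
}
```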
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockEmbeddingsExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockEmbeddingsExecutor.java new file mode 100644 index 0000000000000..6da3f86e0909a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockEmbeddingsExecutor.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.embeddings.AmazonBedrockEmbeddingsResponseListener; + +import java.util.function.Supplier; + +public class AmazonBedrockEmbeddingsExecutor extends AmazonBedrockExecutor { + + private final AmazonBedrockEmbeddingsRequest embeddingsRequest; + + protected AmazonBedrockEmbeddingsExecutor( + AmazonBedrockEmbeddingsRequest request, + AmazonBedrockResponseHandler responseHandler, + Logger logger, + Supplier<Boolean> hasRequestCompletedFunction, + ActionListener<InferenceServiceResults> inferenceResultsListener, + AmazonBedrockClientCache clientCache + ) { + super(request, responseHandler, logger, hasRequestCompletedFunction, inferenceResultsListener, clientCache); + this.embeddingsRequest = request; + } + + @Override + protected void executeClientRequest(AmazonBedrockBaseClient awsBedrockClient) { + var embeddingsResponseListener = new AmazonBedrockEmbeddingsResponseListener( + embeddingsRequest, + responseHandler, + inferenceResultsListener + ); + embeddingsRequest.executeEmbeddingsRequest(awsBedrockClient, embeddingsResponseListener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecuteOnlyRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecuteOnlyRequestSender.java new file mode 100644 index 0000000000000..a08acab655936 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecuteOnlyRequestSender.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.Supplier; + +import static org.elasticsearch.core.Strings.format; + +/** + * The AWS SDK uses its own internal retrier and timeout values on the client + */ +public class AmazonBedrockExecuteOnlyRequestSender implements RequestSender { + + protected final AmazonBedrockClientCache clientCache; + private final ThrottlerManager throttleManager; + + public AmazonBedrockExecuteOnlyRequestSender(AmazonBedrockClientCache clientCache, ThrottlerManager throttlerManager) { + this.clientCache = Objects.requireNonNull(clientCache); + this.throttleManager = Objects.requireNonNull(throttlerManager); + } + + @Override + public void send( + Logger logger, + Request request, + HttpClientContext context, + Supplier<Boolean> hasRequestTimedOutFunction, + ResponseHandler responseHandler, + ActionListener<InferenceServiceResults> listener + ) { + if (request instanceof AmazonBedrockRequest awsRequest && responseHandler instanceof AmazonBedrockResponseHandler awsResponse) { + try { + var executor = createExecutor(awsRequest, awsResponse, logger, hasRequestTimedOutFunction, listener); + + // the run method will call the listener to return the proper value + executor.run(); + return; + } catch (Exception e) { + logException(logger, request, e); + listener.onFailure(wrapWithElasticsearchException(e, request.getInferenceEntityId())); + } + } + + listener.onFailure(new ElasticsearchException("Amazon Bedrock request was not the correct type")); + } + + // allow this to be overridden for testing + protected AmazonBedrockExecutor createExecutor( + AmazonBedrockRequest awsRequest, + AmazonBedrockResponseHandler awsResponse, + Logger logger, + Supplier<Boolean> hasRequestTimedOutFunction, + ActionListener<InferenceServiceResults> listener + ) { + switch (awsRequest.taskType()) { + case COMPLETION -> { + return new AmazonBedrockChatCompletionExecutor( + (AmazonBedrockChatCompletionRequest) awsRequest, + awsResponse, + logger, + hasRequestTimedOutFunction, + listener, + clientCache + ); + } + case TEXT_EMBEDDING -> { + return new AmazonBedrockEmbeddingsExecutor( + (AmazonBedrockEmbeddingsRequest) awsRequest, + awsResponse, + logger, + hasRequestTimedOutFunction, + listener, + clientCache + ); + } + default -> { + throw new UnsupportedOperationException("Unsupported task type [" + awsRequest.taskType() + "] for Amazon Bedrock request"); + } + } + } + + private void logException(Logger logger, Request request, Exception
exception) { + var causeException = ExceptionsHelper.unwrapCause(exception); + + throttleManager.warn( + logger, + format("Failed while sending request from inference entity id [%s] of type [amazonbedrock]", request.getInferenceEntityId()), + causeException + ); + } + + private Exception wrapWithElasticsearchException(Exception e, String inferenceEntityId) { + return new ElasticsearchException( + format("Amazon Bedrock client failed to send request from inference entity id [%s]", inferenceEntityId), + e + ); + } + + public void shutdown() throws IOException { + this.clientCache.close(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutor.java new file mode 100644 index 0000000000000..fa220ee5d2831 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutor.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.util.Objects; +import java.util.function.Supplier; + +public abstract class AmazonBedrockExecutor implements Runnable { + protected final AmazonBedrockModel baseModel; + protected final AmazonBedrockResponseHandler responseHandler; + protected final Logger logger; + protected final AmazonBedrockRequest request; + protected final Supplier<Boolean> hasRequestCompletedFunction; + protected final ActionListener<InferenceServiceResults> inferenceResultsListener; + protected final AmazonBedrockClientCache clientCache; + + protected AmazonBedrockExecutor( + AmazonBedrockRequest request, + AmazonBedrockResponseHandler responseHandler, + Logger logger, + Supplier<Boolean> hasRequestCompletedFunction, + ActionListener<InferenceServiceResults> inferenceResultsListener, + AmazonBedrockClientCache clientCache + ) { + this.request = Objects.requireNonNull(request); + this.responseHandler = Objects.requireNonNull(responseHandler); + this.logger = Objects.requireNonNull(logger); + this.hasRequestCompletedFunction = Objects.requireNonNull(hasRequestCompletedFunction); + this.inferenceResultsListener = Objects.requireNonNull(inferenceResultsListener); + this.clientCache = Objects.requireNonNull(clientCache); + this.baseModel = request.model(); + } + + @Override + public void run() { + if (hasRequestCompletedFunction.get()) { + // has already been run + return; + } + + var inferenceEntityId = baseModel.getInferenceEntityId(); + + try { + var awsBedrockClient = clientCache.getOrCreateClient(baseModel, request.timeout()); + executeClientRequest(awsBedrockClient); + } catch (Exception e) { + var errorMessage = Strings.format("Failed to send request from inference
entity id [%s]", inferenceEntityId); + logger.warn(errorMessage, e); + inferenceResultsListener.onFailure(new ElasticsearchException(errorMessage, e)); + } + } + + protected abstract void executeClientRequest(AmazonBedrockBaseClient awsBedrockClient); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java new file mode 100644 index 0000000000000..c3d458925268c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClient.java @@ -0,0 +1,166 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.services.bedrockruntime.AmazonBedrockRuntimeAsync; +import com.amazonaws.services.bedrockruntime.AmazonBedrockRuntimeAsyncClientBuilder; +import com.amazonaws.services.bedrockruntime.model.AmazonBedrockRuntimeException; +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.ConverseResult; +import com.amazonaws.services.bedrockruntime.model.InvokeModelRequest; +import com.amazonaws.services.bedrockruntime.model.InvokeModelResult; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Strings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.security.AccessController; +import java.security.PrivilegedExceptionAction; +import java.time.Duration; +import java.time.Instant; +import java.util.Objects; + +/** + * Not marking this as "final" so we can subclass it for mocking + */ +public class AmazonBedrockInferenceClient extends AmazonBedrockBaseClient { + + // package-private for testing + static final int CLIENT_CACHE_EXPIRY_MINUTES = 5; + private static final int DEFAULT_CLIENT_TIMEOUT_MS = 10000; + + private final AmazonBedrockRuntimeAsync internalClient; + private volatile Instant expiryTimestamp; + + public static AmazonBedrockBaseClient create(AmazonBedrockModel model, @Nullable TimeValue timeout) { + try { + return new AmazonBedrockInferenceClient(model, timeout); + } catch (Exception e) { + throw new ElasticsearchException("Failed to create Amazon Bedrock Client", e); + } + } + + protected AmazonBedrockInferenceClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + super(model, timeout); + this.internalClient = createAmazonBedrockClient(model, timeout); + setExpiryTimestamp(); + } + + @Override + public void converse(ConverseRequest converseRequest, ActionListener<ConverseResult> responseListener) throws ElasticsearchException { + try { + var responseFuture = internalClient.converseAsync(converseRequest); + responseListener.onResponse(responseFuture.get()); + } catch
(AmazonBedrockRuntimeException amazonBedrockRuntimeException) { + responseListener.onFailure( + new ElasticsearchException( + Strings.format("AmazonBedrock converse failure: [%s]", amazonBedrockRuntimeException.getMessage()), + amazonBedrockRuntimeException + ) + ); + } catch (ElasticsearchException elasticsearchException) { + // just throw the exception if we have one + responseListener.onFailure(elasticsearchException); + } catch (Exception e) { + responseListener.onFailure(new ElasticsearchException("Amazon Bedrock client converse call failed", e)); + } + } + + @Override + public void invokeModel(InvokeModelRequest invokeModelRequest, ActionListener<InvokeModelResult> responseListener) + throws ElasticsearchException { + try { + var responseFuture = internalClient.invokeModelAsync(invokeModelRequest); + responseListener.onResponse(responseFuture.get()); + } catch (AmazonBedrockRuntimeException amazonBedrockRuntimeException) { + responseListener.onFailure( + new ElasticsearchException( + Strings.format("AmazonBedrock invoke model failure: [%s]", amazonBedrockRuntimeException.getMessage()), + amazonBedrockRuntimeException + ) + ); + } catch (ElasticsearchException elasticsearchException) { + // just throw the exception if we have one + responseListener.onFailure(elasticsearchException); + } catch (Exception e) { + responseListener.onFailure(new ElasticsearchException(e)); + } + } + + // allow this to be overridden for test mocks + protected AmazonBedrockRuntimeAsync createAmazonBedrockClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + var secretSettings = model.getSecretSettings(); + var credentials = new BasicAWSCredentials(secretSettings.accessKey.toString(), secretSettings.secretKey.toString()); + var credentialsProvider = new AWSStaticCredentialsProvider(credentials); + var clientConfig = timeout == null + ?
new ClientConfiguration().withConnectionTimeout(DEFAULT_CLIENT_TIMEOUT_MS) + : new ClientConfiguration().withConnectionTimeout((int) timeout.millis()); + + var serviceSettings = model.getServiceSettings(); + + try { + SpecialPermission.check(); + AmazonBedrockRuntimeAsyncClientBuilder builder = AccessController.doPrivileged( + (PrivilegedExceptionAction<AmazonBedrockRuntimeAsyncClientBuilder>) () -> AmazonBedrockRuntimeAsyncClientBuilder.standard() + .withCredentials(credentialsProvider) + .withRegion(serviceSettings.region()) + .withClientConfiguration(clientConfig) + ); + + return SocketAccess.doPrivileged(builder::build); + } catch (AmazonBedrockRuntimeException amazonBedrockRuntimeException) { + throw new ElasticsearchException( + Strings.format("failed to create AmazonBedrockRuntime client: [%s]", amazonBedrockRuntimeException.getMessage()), + amazonBedrockRuntimeException + ); + } catch (Exception e) { + throw new ElasticsearchException("failed to create AmazonBedrockRuntime client", e); + } + } + + private void setExpiryTimestamp() { + this.expiryTimestamp = clock.instant().plus(Duration.ofMinutes(CLIENT_CACHE_EXPIRY_MINUTES)); + } + + @Override + public boolean isExpired(Instant currentTimestampMs) { + Objects.requireNonNull(currentTimestampMs); + return currentTimestampMs.isAfter(expiryTimestamp); + } + + public void resetExpiration() { + setExpiryTimestamp(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AmazonBedrockInferenceClient that = (AmazonBedrockInferenceClient) o; + return Objects.equals(modelKeysAndRegionHashcode, that.modelKeysAndRegionHashcode); + } + + @Override + public int hashCode() { + return this.modelKeysAndRegionHashcode; + } + + // make this package-private so only the cache can close it + @Override + void close() { + internalClient.shutdown(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java new file mode 100644 index 0000000000000..e245365c214af --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCache.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import com.amazonaws.http.IdleConnectionReaper; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.io.IOException; +import java.time.Clock; +import java.util.ArrayList; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.BiFunction; + +public final class AmazonBedrockInferenceClientCache implements AmazonBedrockClientCache { + + private final BiFunction<AmazonBedrockModel, TimeValue, AmazonBedrockBaseClient> creator; + private final Map<Integer, AmazonBedrockBaseClient> clientsCache = new ConcurrentHashMap<>(); + private final ReentrantReadWriteLock cacheLock = new ReentrantReadWriteLock(); + + // not final for testing + private Clock clock; + + public AmazonBedrockInferenceClientCache( + BiFunction<AmazonBedrockModel, TimeValue, AmazonBedrockBaseClient> creator, + @Nullable Clock clock + ) { + this.creator = Objects.requireNonNull(creator); + this.clock = Objects.requireNonNullElse(clock, Clock.systemUTC()); + } + + public AmazonBedrockBaseClient getOrCreateClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + var returnClient = internalGetOrCreateClient(model, timeout); + flushExpiredClients(); + return returnClient; + } + + private AmazonBedrockBaseClient internalGetOrCreateClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + final Integer modelHash = AmazonBedrockInferenceClient.getModelKeysAndRegionHashcode(model, timeout); + cacheLock.readLock().lock(); + try { + return clientsCache.computeIfAbsent(modelHash, hashKey -> { + final AmazonBedrockBaseClient builtClient = creator.apply(model, timeout); + builtClient.setClock(clock); + builtClient.resetExpiration(); + return builtClient; + }); + } finally { + cacheLock.readLock().unlock(); + } + } + + private void flushExpiredClients() { + var currentTimestampMs = clock.instant(); + var expiredClients = new ArrayList<Map.Entry<Integer, AmazonBedrockBaseClient>>(); + + cacheLock.readLock().lock(); + try { + for (final Map.Entry<Integer, AmazonBedrockBaseClient> client : clientsCache.entrySet()) { + if (client.getValue().isExpired(currentTimestampMs)) { + expiredClients.add(client); + } + } + + if (expiredClients.isEmpty()) { + return; + } + + cacheLock.readLock().unlock(); + cacheLock.writeLock().lock(); + try { + for (final Map.Entry<Integer, AmazonBedrockBaseClient> client : expiredClients) { + var removed = clientsCache.remove(client.getKey()); + if (removed != null) { + removed.close(); + } + } + } finally { + cacheLock.readLock().lock(); + cacheLock.writeLock().unlock(); + } + } finally { + cacheLock.readLock().unlock(); + } + } + + @Override + public void close() throws IOException { + releaseCachedClients(); + } + + private void releaseCachedClients() { + // as we're closing and flushing all of these - we'll use a write lock + // across the whole operation to ensure this stays in sync + cacheLock.writeLock().lock(); + try { + // ensure all the clients are closed before we clear + for (final AmazonBedrockBaseClient client : clientsCache.values()) { + client.close(); + } + + // clear previously cached clients, they will be built lazily + clientsCache.clear(); + } finally { + cacheLock.writeLock().unlock(); + } + + // shutdown IdleConnectionReaper background thread + // it will be restarted on new client usage + IdleConnectionReaper.shutdown(); + } + + // used for testing + int clientCount() { + cacheLock.readLock().lock(); + try { + return clientsCache.size(); + } finally { + cacheLock.readLock().unlock(); + } + } + + // used for testing + void setClock(Clock newClock) { + this.clock = Objects.requireNonNull(newClock); + } +}
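The package-private setClock(...) and clientCount() hooks above exist so tests can drive expiry deterministically instead of sleeping. A small runnable sketch of the clock arithmetic involved, assuming the 5-minute CLIENT_CACHE_EXPIRY_MINUTES window from AmazonBedrockInferenceClient:

```java
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneOffset;

// Clock arithmetic behind the expiry test hooks; 5 minutes mirrors CLIENT_CACHE_EXPIRY_MINUTES.
class ExpiryClockSketch {
    public static void main(String[] args) {
        Instant creation = Instant.parse("2024-06-27T00:00:00Z");
        Clock fixed = Clock.fixed(creation, ZoneOffset.UTC);
        Clock later = Clock.offset(fixed, Duration.ofMinutes(6)); // step past the 5-minute window

        Instant expiry = fixed.instant().plus(Duration.ofMinutes(5));
        System.out.println(later.instant().isAfter(expiry)); // true -> the cached client would be flushed
    }
}
```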
for testing + void setClock(Clock newClock) { + this.clock = Objects.requireNonNull(newClock); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java new file mode 100644 index 0000000000000..e23b0274ede26 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSender.java @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockRequestExecutorService; +import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings; +import org.elasticsearch.xpack.inference.external.http.sender.RequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; + +import java.io.IOException; +import java.util.Objects; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; + +public class AmazonBedrockRequestSender implements Sender { + + public static class Factory { + private final ServiceComponents serviceComponents; + private final ClusterService clusterService; + + public Factory(ServiceComponents serviceComponents, ClusterService clusterService) { + this.serviceComponents = Objects.requireNonNull(serviceComponents); + this.clusterService = Objects.requireNonNull(clusterService); + } + + public Sender createSender() { + var clientCache = new AmazonBedrockInferenceClientCache(AmazonBedrockInferenceClient::create, null); + return createSender(new AmazonBedrockExecuteOnlyRequestSender(clientCache, serviceComponents.throttlerManager())); + } + + Sender createSender(AmazonBedrockExecuteOnlyRequestSender requestSender) { + var sender = new AmazonBedrockRequestSender( + serviceComponents.threadPool(), + clusterService, + serviceComponents.settings(), + Objects.requireNonNull(requestSender) + ); + // ensure this is started + sender.start(); + return sender; + } + } + + private static final TimeValue START_COMPLETED_WAIT_TIME = TimeValue.timeValueSeconds(5); + + private final ThreadPool threadPool; + private final AmazonBedrockRequestExecutorService executorService; + private final AtomicBoolean started = new AtomicBoolean(false); + private final CountDownLatch startCompleted = new CountDownLatch(1); + + protected AmazonBedrockRequestSender( + 
ThreadPool threadPool,
+        ClusterService clusterService,
+        Settings settings,
+        AmazonBedrockExecuteOnlyRequestSender requestSender
+    ) {
+        this.threadPool = Objects.requireNonNull(threadPool);
+        executorService = new AmazonBedrockRequestExecutorService(
+            threadPool,
+            startCompleted,
+            new RequestExecutorServiceSettings(settings, clusterService),
+            requestSender
+        );
+    }
+
+    @Override
+    public void start() {
+        if (started.compareAndSet(false, true)) {
+            // The manager must be started before the executor service. That way we guarantee that the http client
+            // is ready prior to the service attempting to use the http client to send a request
+            threadPool.executor(UTILITY_THREAD_POOL_NAME).execute(executorService::start);
+            waitForStartToComplete();
+        }
+    }
+
+    private void waitForStartToComplete() {
+        try {
+            if (startCompleted.await(START_COMPLETED_WAIT_TIME.getSeconds(), TimeUnit.SECONDS) == false) {
+                throw new IllegalStateException("Amazon Bedrock sender startup did not complete in time");
+            }
+        } catch (InterruptedException e) {
+            throw new IllegalStateException("Amazon Bedrock sender interrupted while waiting for startup to complete");
+        }
+    }
+
+    @Override
+    public void send(
+        RequestManager requestCreator,
+        InferenceInputs inferenceInputs,
+        TimeValue timeout,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        assert started.get() : "Amazon Bedrock request sender: call start() before sending a request";
+        waitForStartToComplete();
+
+        if (requestCreator instanceof AmazonBedrockRequestManager amazonBedrockRequestManager) {
+            executorService.execute(amazonBedrockRequestManager, inferenceInputs, timeout, listener);
+            return;
+        }
+
+        listener.onFailure(new ElasticsearchException("Amazon Bedrock request sender did not receive a valid request manager"));
+    }
+
+    @Override
+    public void close() throws IOException {
+        executorService.shutdown();
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java
new file mode 100644
index 0000000000000..1d8226664979c
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockChatCompletionRequestManager.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
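`flushExpiredClients` in the cache above uses the standard `ReentrantReadWriteLock` handoff: scan under the read lock, and only when victims are found release it, take the write lock, evict, then re-acquire the read lock before releasing the write lock so the outer `finally` stays balanced. A self-contained JDK sketch of that locking shape, with an illustrative generic cache standing in for the client map:

```
import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Predicate;

class EvictingCache<K, V> {
    private final Map<K, V> cache = new ConcurrentHashMap<>();
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    void evictMatching(Predicate<V> expired) {
        lock.readLock().lock();
        try {
            var victims = new ArrayList<K>();
            for (Map.Entry<K, V> e : cache.entrySet()) {
                if (expired.test(e.getValue())) {
                    victims.add(e.getKey());
                }
            }
            if (victims.isEmpty()) {
                return; // common case: nothing to do, read lock released below
            }
            // a read lock cannot be upgraded in place, so release it before taking the write lock
            lock.readLock().unlock();
            lock.writeLock().lock();
            try {
                victims.forEach(cache::remove);
            } finally {
                // downgrade: take the read lock back before dropping the write lock
                lock.readLock().lock();
                lock.writeLock().unlock();
            }
        } finally {
            lock.readLock().unlock();
        }
    }
}
```

As in the patch, other threads may touch the map between the read unlock and the write lock; that is harmless here because eviction only removes the keys it already collected.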
+ */
+
+package org.elasticsearch.xpack.inference.external.http.sender;
+
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.inference.external.http.retry.RequestSender;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionEntityFactory;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion.AmazonBedrockChatCompletionResponseHandler;
+import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel;
+
+import java.util.List;
+import java.util.function.Supplier;
+
+public class AmazonBedrockChatCompletionRequestManager extends AmazonBedrockRequestManager {
+    private static final Logger logger = LogManager.getLogger(AmazonBedrockChatCompletionRequestManager.class);
+    private final AmazonBedrockChatCompletionModel model;
+
+    public AmazonBedrockChatCompletionRequestManager(
+        AmazonBedrockChatCompletionModel model,
+        ThreadPool threadPool,
+        @Nullable TimeValue timeout
+    ) {
+        super(model, threadPool, timeout);
+        this.model = model;
+    }
+
+    @Override
+    public void execute(
+        String query,
+        List<String> input,
+        RequestSender requestSender,
+        Supplier<Boolean> hasRequestCompletedFunction,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        var requestEntity = AmazonBedrockChatCompletionEntityFactory.createEntity(model, input);
+        var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, timeout);
+        var responseHandler = new AmazonBedrockChatCompletionResponseHandler();
+
+        try {
+            requestSender.send(logger, request, HttpClientContext.create(), hasRequestCompletedFunction, responseHandler, listener);
+        } catch (Exception e) {
+            var errorMessage = Strings.format(
+                "Failed to send [completion] request from inference entity id [%s]",
+                request.getInferenceEntityId()
+            );
+            logger.warn(errorMessage, e);
+            listener.onFailure(new ElasticsearchException(errorMessage, e));
+        }
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockEmbeddingsRequestManager.java
new file mode 100644
index 0000000000000..e9bc6b574865c
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockEmbeddingsRequestManager.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
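The sender shown earlier gates every `send` behind a one-shot start sequence: `start()` flips an `AtomicBoolean` exactly once and schedules the executor on the utility thread pool, while callers block on the `startCompleted` latch for up to `START_COMPLETED_WAIT_TIME`. A stripped-down JDK sketch of that gate; in the plugin the latch is counted down by the executor service once its loop is running, so the raw thread here is illustrative only:

```
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

class StartGate {
    private final AtomicBoolean started = new AtomicBoolean(false);
    private final CountDownLatch startCompleted = new CountDownLatch(1);

    void start(Runnable backgroundInit) {
        if (started.compareAndSet(false, true)) {
            new Thread(() -> {
                backgroundInit.run();
                startCompleted.countDown(); // unblock every waiter
            }).start();
        }
    }

    void awaitStarted(long seconds) {
        try {
            if (startCompleted.await(seconds, TimeUnit.SECONDS) == false) {
                throw new IllegalStateException("startup did not complete in time");
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag before failing
            throw new IllegalStateException("interrupted while waiting for startup", e);
        }
    }
}
```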
+ */
+
+package org.elasticsearch.xpack.inference.external.http.sender;
+
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.inference.common.Truncator;
+import org.elasticsearch.xpack.inference.external.http.retry.RequestSender;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsEntityFactory;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.embeddings.AmazonBedrockEmbeddingsResponseHandler;
+import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.function.Supplier;
+
+import static org.elasticsearch.xpack.inference.common.Truncator.truncate;
+
+public class AmazonBedrockEmbeddingsRequestManager extends AmazonBedrockRequestManager {
+    private static final Logger logger = LogManager.getLogger(AmazonBedrockEmbeddingsRequestManager.class);
+
+    private final AmazonBedrockEmbeddingsModel embeddingsModel;
+    private final Truncator truncator;
+
+    public AmazonBedrockEmbeddingsRequestManager(
+        AmazonBedrockEmbeddingsModel model,
+        Truncator truncator,
+        ThreadPool threadPool,
+        @Nullable TimeValue timeout
+    ) {
+        super(model, threadPool, timeout);
+        this.embeddingsModel = model;
+        this.truncator = Objects.requireNonNull(truncator);
+    }
+
+    @Override
+    public void execute(
+        String query,
+        List<String> input,
+        RequestSender requestSender,
+        Supplier<Boolean> hasRequestCompletedFunction,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        var serviceSettings = embeddingsModel.getServiceSettings();
+        var truncatedInput = truncate(input, serviceSettings.maxInputTokens());
+        var requestEntity = AmazonBedrockEmbeddingsEntityFactory.createEntity(embeddingsModel, truncatedInput);
+        var responseHandler = new AmazonBedrockEmbeddingsResponseHandler();
+        var request = new AmazonBedrockEmbeddingsRequest(truncator, truncatedInput, embeddingsModel, requestEntity, timeout);
+        try {
+            requestSender.send(logger, request, HttpClientContext.create(), hasRequestCompletedFunction, responseHandler, listener);
+        } catch (Exception e) {
+            var errorMessage = Strings.format(
+                "Failed to send [text_embedding] request from inference entity id [%s]",
+                request.getInferenceEntityId()
+            );
+            logger.warn(errorMessage, e);
+            listener.onFailure(new ElasticsearchException(errorMessage, e));
+        }
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestExecutorService.java
new file mode 100644
index 0000000000000..8b4672d45c250
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestExecutorService.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockExecuteOnlyRequestSender; + +import java.io.IOException; +import java.util.concurrent.CountDownLatch; + +/** + * Allows this to have a public interface for Amazon Bedrock support + */ +public class AmazonBedrockRequestExecutorService extends RequestExecutorService { + + private final AmazonBedrockExecuteOnlyRequestSender requestSender; + + public AmazonBedrockRequestExecutorService( + ThreadPool threadPool, + CountDownLatch startupLatch, + RequestExecutorServiceSettings settings, + AmazonBedrockExecuteOnlyRequestSender requestSender + ) { + super(threadPool, startupLatch, settings, requestSender); + this.requestSender = requestSender; + } + + @Override + public void shutdown() { + super.shutdown(); + try { + requestSender.shutdown(); + } catch (IOException e) { + // swallow the exception + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestManager.java new file mode 100644 index 0000000000000..f75343b038368 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AmazonBedrockRequestManager.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.util.Objects; + +public abstract class AmazonBedrockRequestManager implements RequestManager { + + protected final ThreadPool threadPool; + protected final TimeValue timeout; + private final AmazonBedrockModel baseModel; + + protected AmazonBedrockRequestManager(AmazonBedrockModel baseModel, ThreadPool threadPool, @Nullable TimeValue timeout) { + this.baseModel = Objects.requireNonNull(baseModel); + this.threadPool = Objects.requireNonNull(threadPool); + this.timeout = timeout; + } + + @Override + public String inferenceEntityId() { + return baseModel.getInferenceEntityId(); + } + + @Override + public RateLimitSettings rateLimitSettings() { + return baseModel.rateLimitSettings(); + } + + record RateLimitGrouping(int keyHash) { + public static AmazonBedrockRequestManager.RateLimitGrouping of(AmazonBedrockModel model) { + Objects.requireNonNull(model); + + var awsSecretSettings = model.getSecretSettings(); + + return new RateLimitGrouping(Objects.hash(awsSecretSettings.accessKey, awsSecretSettings.secretKey)); + } + } + + @Override + public Object rateLimitGrouping() { + return RateLimitGrouping.of(this.baseModel); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonBuilder.java new file mode 100644 index 0000000000000..829e899beba5e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonBuilder.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.ToXContent; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; + +public class AmazonBedrockJsonBuilder { + + private final ToXContent jsonWriter; + + public AmazonBedrockJsonBuilder(ToXContent jsonWriter) { + this.jsonWriter = jsonWriter; + } + + public String getStringContent() throws IOException { + try (var builder = jsonBuilder()) { + return Strings.toString(jsonWriter.toXContent(builder, null)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonWriter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonWriter.java new file mode 100644 index 0000000000000..83ebcb4563a8c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockJsonWriter.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock; + +import com.fasterxml.jackson.core.JsonGenerator; + +import java.io.IOException; + +/** + * This is needed as the input for the Amazon Bedrock SDK does not like + * the formatting of XContent JSON output + */ +public interface AmazonBedrockJsonWriter { + JsonGenerator writeJson(JsonGenerator generator) throws IOException; +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockRequest.java new file mode 100644 index 0000000000000..e356212ed07fb --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/AmazonBedrockRequest.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockBaseClient; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.net.URI; + +public abstract class AmazonBedrockRequest implements Request { + + protected final AmazonBedrockModel amazonBedrockModel; + protected final String inferenceId; + protected final TimeValue timeout; + + protected AmazonBedrockRequest(AmazonBedrockModel model, @Nullable TimeValue timeout) { + this.amazonBedrockModel = model; + this.inferenceId = model.getInferenceEntityId(); + this.timeout = timeout; + } + + protected abstract void executeRequest(AmazonBedrockBaseClient client); + + public AmazonBedrockModel model() { + return amazonBedrockModel; + } + + /** + * Amazon Bedrock uses the AWS SDK, and will not create its own Http Request + * But, this is needed for the ExecutableInferenceRequest to get the inferenceEntityId + * @return NoOp request + */ + @Override + public final HttpRequest createHttpRequest() { + return new HttpRequest(new NoOpHttpRequest(), inferenceId); + } + + /** + * Amazon Bedrock uses the AWS SDK, and will not create its own URI + * @return null + */ + @Override + public final URI getURI() { + throw new UnsupportedOperationException(); + } + + /** + * Should be overridden for text embeddings requests + * @return null + */ + @Override + public Request truncate() { + return this; + } + + /** + * Should be overridden for text embeddings requests + * @return boolean[0] + */ + @Override + public boolean[] getTruncationInfo() { + return new boolean[0]; + } + + @Override + public String getInferenceEntityId() { + return amazonBedrockModel.getInferenceEntityId(); + } + + public TimeValue timeout() { + return timeout; + } + + public abstract TaskType taskType(); +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/NoOpHttpRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/NoOpHttpRequest.java new file mode 100644 index 0000000000000..7087bb03bca5e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/NoOpHttpRequest.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock; + +import org.apache.http.client.methods.HttpRequestBase; + +/** + * Needed for compatibility with RequestSender + */ +public class NoOpHttpRequest extends HttpRequestBase { + @Override + public String getMethod() { + return "NOOP"; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java new file mode 100644 index 0000000000000..6e2f2f6702005 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntity.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
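Earlier in this patch, `AmazonBedrockRequestManager.RateLimitGrouping` buckets requests by a hash of the AWS access and secret keys, so inference endpoints that share credentials also share a rate limit. Records make that key cheap because `equals` and `hashCode` are derived from the components; an illustrative stand-in with hypothetical credential values:

```
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

record Grouping(int keyHash) {
    static Grouping of(String accessKey, String secretKey) {
        return new Grouping(Objects.hash(accessKey, secretKey));
    }
}

class GroupingDemo {
    public static void main(String[] args) {
        Map<Grouping, Integer> requestCounts = new ConcurrentHashMap<>();
        requestCounts.merge(Grouping.of("access-a", "secret-a"), 1, Integer::sum);
        requestCounts.merge(Grouping.of("access-a", "secret-a"), 1, Integer::sum);
        requestCounts.merge(Grouping.of("access-b", "secret-b"), 1, Integer::sum);
        System.out.println(requestCounts.size()); // 2: identical credentials collapse into one bucket
    }
}
```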
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration; + +import org.elasticsearch.core.Nullable; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; + +public record AmazonBedrockAI21LabsCompletionRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Integer maxTokenCount +) implements AmazonBedrockConverseRequestEntity { + + public AmazonBedrockAI21LabsCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public ConverseRequest addMessages(ConverseRequest request) { + return request.withMessages(getConverseMessageList(messages)); + } + + @Override + public ConverseRequest addInferenceConfig(ConverseRequest request) { + if (temperature == null && topP == null && maxTokenCount == null) { + return request; + } + + InferenceConfiguration inferenceConfig = new InferenceConfiguration(); + + if (temperature != null) { + inferenceConfig = inferenceConfig.withTemperature(temperature.floatValue()); + } + + if (topP != null) { + inferenceConfig = inferenceConfig.withTopP(topP.floatValue()); + } + + if (maxTokenCount != null) { + inferenceConfig = inferenceConfig.withMaxTokens(maxTokenCount); + } + + return request.withInferenceConfig(inferenceConfig); + } + + @Override + public ConverseRequest addAdditionalModelFields(ConverseRequest request) { + return request; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java new file mode 100644 index 0000000000000..a8b0032af09c5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntity.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
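The `addInferenceConfig` hook above, repeated almost verbatim in the Anthropic, Cohere, Meta, Mistral, and Titan entities that follow, only attaches an `InferenceConfiguration` when at least one knob is set, so requests without task settings go out untouched. A condensed sketch of just that logic, using the same AWS SDK v1 types as the patch:

```
import com.amazonaws.services.bedrockruntime.model.ConverseRequest;
import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration;

final class InferenceConfigs {
    // mirrors the entities above: no settings means the request is returned unchanged
    static ConverseRequest withOptionalConfig(ConverseRequest request, Double temperature, Double topP, Integer maxTokens) {
        if (temperature == null && topP == null && maxTokens == null) {
            return request;
        }
        InferenceConfiguration config = new InferenceConfiguration();
        if (temperature != null) {
            config = config.withTemperature(temperature.floatValue());
        }
        if (topP != null) {
            config = config.withTopP(topP.floatValue());
        }
        if (maxTokens != null) {
            config = config.withMaxTokens(maxTokens);
        }
        return request.withInferenceConfig(config);
    }
}
```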
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Strings; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; + +public record AmazonBedrockAnthropicCompletionRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Double topK, + @Nullable Integer maxTokenCount +) implements AmazonBedrockConverseRequestEntity { + + public AmazonBedrockAnthropicCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public ConverseRequest addMessages(ConverseRequest request) { + return request.withMessages(getConverseMessageList(messages)); + } + + @Override + public ConverseRequest addInferenceConfig(ConverseRequest request) { + if (temperature == null && topP == null && maxTokenCount == null) { + return request; + } + + InferenceConfiguration inferenceConfig = new InferenceConfiguration(); + + if (temperature != null) { + inferenceConfig = inferenceConfig.withTemperature(temperature.floatValue()); + } + + if (topP != null) { + inferenceConfig = inferenceConfig.withTopP(topP.floatValue()); + } + + if (maxTokenCount != null) { + inferenceConfig = inferenceConfig.withMaxTokens(maxTokenCount); + } + + return request.withInferenceConfig(inferenceConfig); + } + + @Override + public ConverseRequest addAdditionalModelFields(ConverseRequest request) { + if (topK == null) { + return request; + } + + String topKField = Strings.format("{\"top_k\":%f}", topK.floatValue()); + return request.withAdditionalModelResponseFieldPaths(topKField); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java new file mode 100644 index 0000000000000..f86d2229d42ad --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionEntityFactory.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
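`addAdditionalModelFields` above serializes `top_k` with `%f`, so a `topK` of `0.5` goes out as the literal `{"top_k":0.500000}`, since `%f` defaults to six decimal places. A locale-pinned check of that exact output; `String.format(Locale.ROOT, ...)` is used here as a stand-in for the patch's `Strings.format` helper:

```
import java.util.Locale;

class TopKFormat {
    public static void main(String[] args) {
        Double topK = 0.5;
        String field = String.format(Locale.ROOT, "{\"top_k\":%f}", topK.floatValue());
        System.out.println(field); // prints {"top_k":0.500000}
    }
}
```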
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; + +import java.util.List; +import java.util.Objects; + +public final class AmazonBedrockChatCompletionEntityFactory { + public static AmazonBedrockConverseRequestEntity createEntity(AmazonBedrockChatCompletionModel model, List messages) { + Objects.requireNonNull(model); + Objects.requireNonNull(messages); + var serviceSettings = model.getServiceSettings(); + var taskSettings = model.getTaskSettings(); + switch (serviceSettings.provider()) { + case AI21LABS -> { + return new AmazonBedrockAI21LabsCompletionRequestEntity( + messages, + taskSettings.temperature(), + taskSettings.topP(), + taskSettings.maxNewTokens() + ); + } + case AMAZONTITAN -> { + return new AmazonBedrockTitanCompletionRequestEntity( + messages, + taskSettings.temperature(), + taskSettings.topP(), + taskSettings.maxNewTokens() + ); + } + case ANTHROPIC -> { + return new AmazonBedrockAnthropicCompletionRequestEntity( + messages, + taskSettings.temperature(), + taskSettings.topP(), + taskSettings.topK(), + taskSettings.maxNewTokens() + ); + } + case COHERE -> { + return new AmazonBedrockCohereCompletionRequestEntity( + messages, + taskSettings.temperature(), + taskSettings.topP(), + taskSettings.topK(), + taskSettings.maxNewTokens() + ); + } + case META -> { + return new AmazonBedrockMetaCompletionRequestEntity( + messages, + taskSettings.temperature(), + taskSettings.topP(), + taskSettings.maxNewTokens() + ); + } + case MISTRAL -> { + return new AmazonBedrockMistralCompletionRequestEntity( + messages, + taskSettings.temperature(), + taskSettings.topP(), + taskSettings.topK(), + taskSettings.maxNewTokens() + ); + } + default -> { + return null; + } + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java new file mode 100644 index 0000000000000..f02f05f2d3b17 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockChatCompletionRequest.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockBaseClient; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion.AmazonBedrockChatCompletionResponseListener; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; + +import java.io.IOException; +import java.util.Objects; + +public class AmazonBedrockChatCompletionRequest extends AmazonBedrockRequest { + public static final String USER_ROLE = "user"; + private final AmazonBedrockConverseRequestEntity requestEntity; + private AmazonBedrockChatCompletionResponseListener listener; + + public AmazonBedrockChatCompletionRequest( + AmazonBedrockChatCompletionModel model, + AmazonBedrockConverseRequestEntity requestEntity, + @Nullable TimeValue timeout + ) { + super(model, timeout); + this.requestEntity = Objects.requireNonNull(requestEntity); + } + + @Override + protected void executeRequest(AmazonBedrockBaseClient client) { + var converseRequest = getConverseRequest(); + + try { + SocketAccess.doPrivileged(() -> client.converse(converseRequest, listener)); + } catch (IOException e) { + listener.onFailure(new RuntimeException(e)); + } + } + + @Override + public TaskType taskType() { + return TaskType.COMPLETION; + } + + private ConverseRequest getConverseRequest() { + var converseRequest = new ConverseRequest().withModelId(amazonBedrockModel.model()); + converseRequest = requestEntity.addMessages(converseRequest); + converseRequest = requestEntity.addInferenceConfig(converseRequest); + converseRequest = requestEntity.addAdditionalModelFields(converseRequest); + return converseRequest; + } + + public void executeChatCompletionRequest( + AmazonBedrockBaseClient awsBedrockClient, + AmazonBedrockChatCompletionResponseListener chatCompletionResponseListener + ) { + this.listener = chatCompletionResponseListener; + this.executeRequest(awsBedrockClient); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java new file mode 100644 index 0000000000000..17a264ef820ff --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntity.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Strings; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; + +public record AmazonBedrockCohereCompletionRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Double topK, + @Nullable Integer maxTokenCount +) implements AmazonBedrockConverseRequestEntity { + + public AmazonBedrockCohereCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public ConverseRequest addMessages(ConverseRequest request) { + return request.withMessages(getConverseMessageList(messages)); + } + + @Override + public ConverseRequest addInferenceConfig(ConverseRequest request) { + if (temperature == null && topP == null && maxTokenCount == null) { + return request; + } + + InferenceConfiguration inferenceConfig = new InferenceConfiguration(); + + if (temperature != null) { + inferenceConfig = inferenceConfig.withTemperature(temperature.floatValue()); + } + + if (topP != null) { + inferenceConfig = inferenceConfig.withTopP(topP.floatValue()); + } + + if (maxTokenCount != null) { + inferenceConfig = inferenceConfig.withMaxTokens(maxTokenCount); + } + + return request.withInferenceConfig(inferenceConfig); + } + + @Override + public ConverseRequest addAdditionalModelFields(ConverseRequest request) { + if (topK == null) { + return request; + } + + String topKField = Strings.format("{\"top_k\":%f}", topK.floatValue()); + return request.withAdditionalModelResponseFieldPaths(topKField); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java new file mode 100644 index 0000000000000..fbd55e76e509b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestEntity.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion;
+
+import com.amazonaws.services.bedrockruntime.model.ConverseRequest;
+
+public interface AmazonBedrockConverseRequestEntity {
+    ConverseRequest addMessages(ConverseRequest request);
+
+    ConverseRequest addInferenceConfig(ConverseRequest request);
+
+    ConverseRequest addAdditionalModelFields(ConverseRequest request);
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java
new file mode 100644
index 0000000000000..2cfb56a94b319
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseUtils.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion;
+
+import com.amazonaws.services.bedrockruntime.model.ContentBlock;
+import com.amazonaws.services.bedrockruntime.model.Message;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest.USER_ROLE;
+
+public final class AmazonBedrockConverseUtils {
+
+    public static List<Message> getConverseMessageList(List<String> messages) {
+        List<Message> messageList = new ArrayList<>();
+        for (String message : messages) {
+            var messageContent = new ContentBlock().withText(message);
+            var returnMessage = (new Message()).withRole(USER_ROLE).withContent(messageContent);
+            messageList.add(returnMessage);
+        }
+        return messageList;
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java
new file mode 100644
index 0000000000000..cdabdd4cbebff
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntity.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
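Taken together, `getConverseMessageList` and the entity hooks build a `ConverseRequest` in stages: model id, then messages, then the optional inference config. A trimmed, self-contained sketch of that assembly against the AWS SDK v1 types; the model id is a placeholder, not something this patch pins down:

```
import com.amazonaws.services.bedrockruntime.model.ContentBlock;
import com.amazonaws.services.bedrockruntime.model.ConverseRequest;
import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration;
import com.amazonaws.services.bedrockruntime.model.Message;

import java.util.List;

class ConverseAssembly {
    public static void main(String[] args) {
        List<Message> messages = List.of(
            new Message().withRole("user").withContent(new ContentBlock().withText("Hello, Bedrock"))
        );
        ConverseRequest request = new ConverseRequest().withModelId("example.placeholder-model-id") // placeholder
            .withMessages(messages)
            .withInferenceConfig(new InferenceConfiguration().withTemperature(0.2f).withMaxTokens(256));
        System.out.println(request.getModelId());
    }
}
```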
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration; + +import org.elasticsearch.core.Nullable; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; + +public record AmazonBedrockMetaCompletionRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Integer maxTokenCount +) implements AmazonBedrockConverseRequestEntity { + + public AmazonBedrockMetaCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public ConverseRequest addMessages(ConverseRequest request) { + return request.withMessages(getConverseMessageList(messages)); + } + + @Override + public ConverseRequest addInferenceConfig(ConverseRequest request) { + if (temperature == null && topP == null && maxTokenCount == null) { + return request; + } + + InferenceConfiguration inferenceConfig = new InferenceConfiguration(); + + if (temperature != null) { + inferenceConfig = inferenceConfig.withTemperature(temperature.floatValue()); + } + + if (topP != null) { + inferenceConfig = inferenceConfig.withTopP(topP.floatValue()); + } + + if (maxTokenCount != null) { + inferenceConfig = inferenceConfig.withMaxTokens(maxTokenCount); + } + + return request.withInferenceConfig(inferenceConfig); + } + + @Override + public ConverseRequest addAdditionalModelFields(ConverseRequest request) { + return request; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java new file mode 100644 index 0000000000000..c68eaa1b81f54 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntity.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Strings; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; + +public record AmazonBedrockMistralCompletionRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Double topK, + @Nullable Integer maxTokenCount +) implements AmazonBedrockConverseRequestEntity { + + public AmazonBedrockMistralCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public ConverseRequest addMessages(ConverseRequest request) { + return request.withMessages(getConverseMessageList(messages)); + } + + @Override + public ConverseRequest addInferenceConfig(ConverseRequest request) { + if (temperature == null && topP == null && maxTokenCount == null) { + return request; + } + + InferenceConfiguration inferenceConfig = new InferenceConfiguration(); + + if (temperature != null) { + inferenceConfig = inferenceConfig.withTemperature(temperature.floatValue()); + } + + if (topP != null) { + inferenceConfig = inferenceConfig.withTopP(topP.floatValue()); + } + + if (maxTokenCount != null) { + inferenceConfig = inferenceConfig.withMaxTokens(maxTokenCount); + } + + return request.withInferenceConfig(inferenceConfig); + } + + @Override + public ConverseRequest addAdditionalModelFields(ConverseRequest request) { + if (topK == null) { + return request; + } + + String topKField = Strings.format("{\"top_k\":%f}", topK.floatValue()); + return request.withAdditionalModelResponseFieldPaths(topKField); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java new file mode 100644 index 0000000000000..d56035b80e9ef --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntity.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.InferenceConfiguration; + +import org.elasticsearch.core.Nullable; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseUtils.getConverseMessageList; + +public record AmazonBedrockTitanCompletionRequestEntity( + List messages, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Integer maxTokenCount +) implements AmazonBedrockConverseRequestEntity { + + public AmazonBedrockTitanCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public ConverseRequest addMessages(ConverseRequest request) { + return request.withMessages(getConverseMessageList(messages)); + } + + @Override + public ConverseRequest addInferenceConfig(ConverseRequest request) { + if (temperature == null && topP == null && maxTokenCount == null) { + return request; + } + + InferenceConfiguration inferenceConfig = new InferenceConfiguration(); + + if (temperature != null) { + inferenceConfig = inferenceConfig.withTemperature(temperature.floatValue()); + } + + if (topP != null) { + inferenceConfig = inferenceConfig.withTopP(topP.floatValue()); + } + + if (maxTokenCount != null) { + inferenceConfig = inferenceConfig.withMaxTokens(maxTokenCount); + } + + return request.withInferenceConfig(inferenceConfig); + } + + @Override + public ConverseRequest addAdditionalModelFields(ConverseRequest request) { + return request; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntity.java new file mode 100644 index 0000000000000..edca5bc1bdf9c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntity.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings;
+
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+public record AmazonBedrockCohereEmbeddingsRequestEntity(List<String> input) implements ToXContentObject {
+
+    private static final String TEXTS_FIELD = "texts";
+    private static final String INPUT_TYPE_FIELD = "input_type";
+    private static final String INPUT_TYPE_SEARCH_DOCUMENT = "search_document";
+
+    public AmazonBedrockCohereEmbeddingsRequestEntity {
+        Objects.requireNonNull(input);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(TEXTS_FIELD, input);
+        builder.field(INPUT_TYPE_FIELD, INPUT_TYPE_SEARCH_DOCUMENT);
+        builder.endObject();
+        return builder;
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsEntityFactory.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsEntityFactory.java
new file mode 100644
index 0000000000000..a31b033507264
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsEntityFactory.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
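The record above always tags the payload with `input_type: search_document`, so document ingestion is the assumed use case. A small sketch that renders the same shape with the XContent helpers this patch already imports; the standalone class is illustrative:

```
import org.elasticsearch.common.Strings;
import org.elasticsearch.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.List;

import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;

class CohereEmbeddingsPayload {
    // produces: {"texts":["first","second"],"input_type":"search_document"}
    static String render(List<String> input) throws IOException {
        try (XContentBuilder builder = jsonBuilder()) {
            builder.startObject();
            builder.field("texts", input);
            builder.field("input_type", "search_document");
            builder.endObject();
            return Strings.toString(builder);
        }
    }
}
```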
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xpack.inference.common.Truncator; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel; + +import java.util.Objects; + +public final class AmazonBedrockEmbeddingsEntityFactory { + public static ToXContent createEntity(AmazonBedrockEmbeddingsModel model, Truncator.TruncationResult truncationResult) { + Objects.requireNonNull(model); + Objects.requireNonNull(truncationResult); + + var serviceSettings = model.getServiceSettings(); + + var truncatedInput = truncationResult.input(); + if (truncatedInput == null || truncatedInput.isEmpty()) { + throw new ElasticsearchException("[input] cannot be null or empty"); + } + + switch (serviceSettings.provider()) { + case AMAZONTITAN -> { + if (truncatedInput.size() > 1) { + throw new ElasticsearchException("[input] cannot contain more than one string"); + } + return new AmazonBedrockTitanEmbeddingsRequestEntity(truncatedInput.get(0)); + } + case COHERE -> { + return new AmazonBedrockCohereEmbeddingsRequestEntity(truncatedInput); + } + default -> { + return null; + } + } + + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsRequest.java new file mode 100644 index 0000000000000..96d3b3a3cc057 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockEmbeddingsRequest.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
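Only the Titan and Cohere providers are supported for embeddings, and Titan additionally accepts a single string per request, which is why the factory above rejects multi-entry input for `AMAZONTITAN`. A hedged usage sketch of the two entities it can return, rendered through the `AmazonBedrockJsonBuilder` defined earlier in this patch (the Titan record appears just below):

```
import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockJsonBuilder;
import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockCohereEmbeddingsRequestEntity;
import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockTitanEmbeddingsRequestEntity;

import java.util.List;

class EmbeddingsEntitiesDemo {
    public static void main(String[] args) throws Exception {
        var titan = new AmazonBedrockTitanEmbeddingsRequestEntity("a single passage");
        var cohere = new AmazonBedrockCohereEmbeddingsRequestEntity(List.of("first", "second"));

        System.out.println(new AmazonBedrockJsonBuilder(titan).getStringContent());
        // {"inputText":"a single passage"}
        System.out.println(new AmazonBedrockJsonBuilder(cohere).getStringContent());
        // {"texts":["first","second"],"input_type":"search_document"}
    }
}
```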
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings; + +import com.amazonaws.services.bedrockruntime.model.InvokeModelRequest; +import com.amazonaws.services.bedrockruntime.model.InvokeModelResult; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.elasticsearch.xpack.inference.common.Truncator; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockBaseClient; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockJsonBuilder; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.embeddings.AmazonBedrockEmbeddingsResponseListener; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Objects; + +public class AmazonBedrockEmbeddingsRequest extends AmazonBedrockRequest { + private final AmazonBedrockEmbeddingsModel embeddingsModel; + private final ToXContent requestEntity; + private final Truncator truncator; + private final Truncator.TruncationResult truncationResult; + private final AmazonBedrockProvider provider; + private ActionListener listener = null; + + public AmazonBedrockEmbeddingsRequest( + Truncator truncator, + Truncator.TruncationResult input, + AmazonBedrockEmbeddingsModel model, + ToXContent requestEntity, + @Nullable TimeValue timeout + ) { + super(model, timeout); + this.truncator = Objects.requireNonNull(truncator); + this.truncationResult = Objects.requireNonNull(input); + this.requestEntity = Objects.requireNonNull(requestEntity); + this.embeddingsModel = model; + this.provider = model.provider(); + } + + public AmazonBedrockProvider provider() { + return provider; + } + + @Override + protected void executeRequest(AmazonBedrockBaseClient client) { + try { + var jsonBuilder = new AmazonBedrockJsonBuilder(requestEntity); + var bodyAsString = jsonBuilder.getStringContent(); + + var charset = StandardCharsets.UTF_8; + var bodyBuffer = charset.encode(bodyAsString); + + var invokeModelRequest = new InvokeModelRequest().withModelId(embeddingsModel.model()).withBody(bodyBuffer); + + SocketAccess.doPrivileged(() -> client.invokeModel(invokeModelRequest, listener)); + } catch (IOException e) { + listener.onFailure(new RuntimeException(e)); + } + } + + @Override + public Request truncate() { + var truncatedInput = truncator.truncate(truncationResult.input()); + return new AmazonBedrockEmbeddingsRequest(truncator, truncatedInput, embeddingsModel, requestEntity, timeout); + } + + @Override + public boolean[] getTruncationInfo() { + return truncationResult.truncated().clone(); + } + + @Override + public TaskType taskType() { + return TaskType.TEXT_EMBEDDING; + } + + public void executeEmbeddingsRequest( + AmazonBedrockBaseClient awsBedrockClient, + AmazonBedrockEmbeddingsResponseListener embeddingsResponseListener + ) { + this.listener = embeddingsResponseListener; + this.executeRequest(awsBedrockClient); + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntity.java new file mode 100644 index 0000000000000..f55edd0442913 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntity.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings; + +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public record AmazonBedrockTitanEmbeddingsRequestEntity(String inputText) implements ToXContentObject { + + private static final String INPUT_TEXT_FIELD = "inputText"; + + public AmazonBedrockTitanEmbeddingsRequestEntity { + Objects.requireNonNull(inputText); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(INPUT_TEXT_FIELD, inputText); + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponse.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponse.java new file mode 100644 index 0000000000000..54b05137acda3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponse.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.amazonbedrock; + +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; + +public abstract class AmazonBedrockResponse { + public abstract InferenceServiceResults accept(AmazonBedrockRequest request); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java new file mode 100644 index 0000000000000..9dc15ea667c1d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseHandler.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
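Unlike chat completion, which goes through `Converse`, the embeddings path above calls `InvokeModel`, so the JSON body has to be encoded into a UTF-8 `ByteBuffer` first, exactly as `executeRequest` does. The encoding step in isolation, with a placeholder model id:

```
import com.amazonaws.services.bedrockruntime.model.InvokeModelRequest;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

class InvokeModelBody {
    public static void main(String[] args) {
        String body = "{\"inputText\":\"a single passage\"}";
        ByteBuffer buffer = StandardCharsets.UTF_8.encode(body);
        InvokeModelRequest request = new InvokeModelRequest()
            .withModelId("example.placeholder-model-id") // placeholder
            .withBody(buffer);
        System.out.println(request.getModelId());
    }
}
```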
+ */
+
+package org.elasticsearch.xpack.inference.external.response.amazonbedrock;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.xpack.inference.external.http.HttpResult;
+import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler;
+import org.elasticsearch.xpack.inference.external.http.retry.RetryException;
+import org.elasticsearch.xpack.inference.external.request.Request;
+import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
+
+public abstract class AmazonBedrockResponseHandler implements ResponseHandler {
+    @Override
+    public final void validateResponse(ThrottlerManager throttlerManager, Logger logger, Request request, HttpResult result)
+        throws RetryException {
+        // do nothing as the AWS SDK will take care of validation for us
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseListener.java
new file mode 100644
index 0000000000000..ce4d6d1dea655
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/AmazonBedrockResponseListener.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.response.amazonbedrock;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest;
+
+import java.util.Objects;
+
+public class AmazonBedrockResponseListener {
+    protected final AmazonBedrockRequest request;
+    protected final ActionListener<InferenceServiceResults> inferenceResultsListener;
+    protected final AmazonBedrockResponseHandler responseHandler;
+
+    public AmazonBedrockResponseListener(
+        AmazonBedrockRequest request,
+        AmazonBedrockResponseHandler responseHandler,
+        ActionListener<InferenceServiceResults> inferenceResultsListener
+    ) {
+        this.request = Objects.requireNonNull(request);
+        this.responseHandler = Objects.requireNonNull(responseHandler);
+        this.inferenceResultsListener = Objects.requireNonNull(inferenceResultsListener);
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponse.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponse.java
new file mode 100644
index 0000000000000..5b3872e2c416a
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponse.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseResult; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponse; + +import java.util.ArrayList; + +public class AmazonBedrockChatCompletionResponse extends AmazonBedrockResponse { + + private final ConverseResult result; + + public AmazonBedrockChatCompletionResponse(ConverseResult responseResult) { + this.result = responseResult; + } + + @Override + public InferenceServiceResults accept(AmazonBedrockRequest request) { + if (request instanceof AmazonBedrockChatCompletionRequest asChatCompletionRequest) { + return fromResponse(result); + } + + throw new ElasticsearchException("unexpected request type [" + request.getClass() + "]"); + } + + public static ChatCompletionResults fromResponse(ConverseResult response) { + var responseMessage = response.getOutput().getMessage(); + + var messageContents = responseMessage.getContent(); + var resultTexts = new ArrayList(); + for (var messageContent : messageContents) { + resultTexts.add(new ChatCompletionResults.Result(messageContent.getText())); + } + + return new ChatCompletionResults(resultTexts); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseHandler.java new file mode 100644 index 0000000000000..a24f54c50eef3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseHandler.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
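For orientation, a hedged, test-style sketch of the ConverseResult shape that fromResponse walks; the with* fluent setters follow the usual v1 AWS SDK builder style, so treat the exact calls as assumptions rather than part of this change:

```java
import com.amazonaws.services.bedrockruntime.model.ContentBlock;
import com.amazonaws.services.bedrockruntime.model.ConverseOutput;
import com.amazonaws.services.bedrockruntime.model.ConverseResult;
import com.amazonaws.services.bedrockruntime.model.Message;

class ChatCompletionConversionSketch {
    static void roundTrip() {
        // output.message.content[*].text is the only part fromResponse reads
        var sdkResult = new ConverseResult().withOutput(
            new ConverseOutput().withMessage(new Message().withContent(new ContentBlock().withText("Hello there")))
        );
        var results = AmazonBedrockChatCompletionResponse.fromResponse(sdkResult);
        // results wraps a single ChatCompletionResults.Result containing "Hello there"
    }
}
```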
+ */ + +package org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ConverseResult; + +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler; + +public class AmazonBedrockChatCompletionResponseHandler extends AmazonBedrockResponseHandler { + + private ConverseResult responseResult; + + public AmazonBedrockChatCompletionResponseHandler() {} + + @Override + public InferenceServiceResults parseResult(Request request, HttpResult result) throws RetryException { + var response = new AmazonBedrockChatCompletionResponse(responseResult); + return response.accept((AmazonBedrockRequest) request); + } + + @Override + public String getRequestType() { + return "Amazon Bedrock Chat Completion"; + } + + public void acceptChatCompletionResponseObject(ConverseResult response) { + this.responseResult = response; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseListener.java new file mode 100644 index 0000000000000..be03ba84571eb --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/completion/AmazonBedrockChatCompletionResponseListener.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion;
+
+import com.amazonaws.services.bedrockruntime.model.ConverseResult;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseListener;
+
+public class AmazonBedrockChatCompletionResponseListener extends AmazonBedrockResponseListener implements ActionListener<ConverseResult> {
+
+    public AmazonBedrockChatCompletionResponseListener(
+        AmazonBedrockChatCompletionRequest request,
+        AmazonBedrockResponseHandler responseHandler,
+        ActionListener<InferenceServiceResults> inferenceResultsListener
+    ) {
+        super(request, responseHandler, inferenceResultsListener);
+    }
+
+    @Override
+    public void onResponse(ConverseResult result) {
+        ((AmazonBedrockChatCompletionResponseHandler) responseHandler).acceptChatCompletionResponseObject(result);
+        inferenceResultsListener.onResponse(responseHandler.parseResult(request, null));
+    }
+
+    @Override
+    public void onFailure(Exception e) {
+        throw new ElasticsearchException(e);
+    }
+
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponse.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponse.java
new file mode 100644
index 0000000000000..83fa790acbe68
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponse.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.response.amazonbedrock.embeddings;
+
+import com.amazonaws.services.bedrockruntime.model.InvokeModelResult;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
+import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest;
+import org.elasticsearch.xpack.inference.external.response.XContentUtils;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponse;
+import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList;
+import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField;
+
+public class AmazonBedrockEmbeddingsResponse extends AmazonBedrockResponse {
+    private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Amazon Bedrock embeddings response";
+    private final InvokeModelResult result;
+
+    public AmazonBedrockEmbeddingsResponse(InvokeModelResult invokeModelResult) {
+        this.result = invokeModelResult;
+    }
+
+    @Override
+    public InferenceServiceResults accept(AmazonBedrockRequest request) {
+        if (request instanceof AmazonBedrockEmbeddingsRequest asEmbeddingsRequest) {
+            return fromResponse(result, asEmbeddingsRequest.provider());
+        }
+
+        throw new ElasticsearchException("unexpected request type [" + request.getClass() + "]");
+    }
+
+    public static InferenceTextEmbeddingFloatResults fromResponse(InvokeModelResult response, AmazonBedrockProvider provider) {
+        var charset = StandardCharsets.UTF_8;
+        var bodyText = String.valueOf(charset.decode(response.getBody()));
+
+        var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
+
+        try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, bodyText)) {
+            // move to the first token
+            jsonParser.nextToken();
+
+            XContentParser.Token token = jsonParser.currentToken();
+            ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser);
+
+            var embeddingList = parseEmbeddings(jsonParser, provider);
+
+            return new InferenceTextEmbeddingFloatResults(embeddingList);
+        } catch (IOException e) {
+            throw new ElasticsearchException(e);
+        }
+    }
+
+    private static List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> parseEmbeddings(
+        XContentParser jsonParser,
+        AmazonBedrockProvider provider
+    ) throws IOException {
+        switch (provider) {
+            case AMAZONTITAN -> {
+                return parseTitanEmbeddings(jsonParser);
+            }
+            case COHERE -> {
+                return parseCohereEmbeddings(jsonParser);
+            }
+            default -> throw new IOException("Unsupported provider [" + provider + "]");
+        }
+    }
+
+    private static List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> parseTitanEmbeddings(XContentParser parser)
+        throws IOException {
+        /*
+        Titan response:
+        {
+            "embedding": [float, float, ...],
+            "inputTextTokenCount": int
+        }
+        */
+        positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE);
+        List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
+        var embeddingValues = InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList);
+        return List.of(embeddingValues);
+    }
+
+    private static List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> parseCohereEmbeddings(XContentParser parser)
+        throws IOException {
+        /*
+        Cohere response:
+        {
+            "embeddings": [
+                [< array of 1024 floats >],
+                ...
+            ],
+            "id": string,
+            "response_type" : "embeddings_floats",
+            "texts": [string]
+        }
+        */
+        positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE);
+
+        List<InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding> embeddingList = parseList(
+            parser,
+            AmazonBedrockEmbeddingsResponse::parseCohereEmbeddingsListItem
+        );
+
+        return embeddingList;
+    }
+
+    private static InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding parseCohereEmbeddingsListItem(XContentParser parser)
+        throws IOException {
+        List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
+        return InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding.of(embeddingValuesList);
+    }
+
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseHandler.java
new file mode 100644
index 0000000000000..a3fb68ee23486
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseHandler.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
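A small sketch (not in the patch) of the Titan branch in isolation, wiring a canned payload through the same parser utilities the class uses; the payload values are made up:

```java
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.inference.external.response.XContentUtils;

import java.io.IOException;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentParserUtils.parseList;
import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField;

class TitanParseSketch {
    static List<Float> parse() throws IOException {
        String body = "{\"embedding\": [0.123, -0.456], \"inputTextTokenCount\": 3}";
        try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, body)) {
            parser.nextToken(); // land on START_OBJECT, as fromResponse does
            positionParserAtTokenAfterField(parser, "embedding", "Failed to find required field [%s]");
            return parseList(parser, XContentUtils::parseFloat); // [0.123f, -0.456f]
        }
    }
}
```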
+ */ + +package org.elasticsearch.xpack.inference.external.response.amazonbedrock.embeddings; + +import com.amazonaws.services.bedrockruntime.model.InvokeModelResult; + +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler; + +public class AmazonBedrockEmbeddingsResponseHandler extends AmazonBedrockResponseHandler { + + private InvokeModelResult invokeModelResult; + + @Override + public InferenceServiceResults parseResult(Request request, HttpResult result) throws RetryException { + var responseParser = new AmazonBedrockEmbeddingsResponse(invokeModelResult); + return responseParser.accept((AmazonBedrockRequest) request); + } + + @Override + public String getRequestType() { + return "Amazon Bedrock Embeddings"; + } + + public void acceptEmbeddingsResult(InvokeModelResult result) { + this.invokeModelResult = result; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseListener.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseListener.java new file mode 100644 index 0000000000000..36519ae31ff60 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/amazonbedrock/embeddings/AmazonBedrockEmbeddingsResponseListener.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.external.response.amazonbedrock.embeddings;
+
+import com.amazonaws.services.bedrockruntime.model.InvokeModelResult;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseListener;
+
+public class AmazonBedrockEmbeddingsResponseListener extends AmazonBedrockResponseListener implements ActionListener<InvokeModelResult> {
+
+    public AmazonBedrockEmbeddingsResponseListener(
+        AmazonBedrockEmbeddingsRequest request,
+        AmazonBedrockResponseHandler responseHandler,
+        ActionListener<InferenceServiceResults> inferenceResultsListener
+    ) {
+        super(request, responseHandler, inferenceResultsListener);
+    }
+
+    @Override
+    public void onResponse(InvokeModelResult result) {
+        ((AmazonBedrockEmbeddingsResponseHandler) responseHandler).acceptEmbeddingsResult(result);
+        inferenceResultsListener.onResponse(responseHandler.parseResult(request, null));
+    }
+
+    @Override
+    public void onFailure(Exception e) {
+        inferenceResultsListener.onFailure(e);
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockConstants.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockConstants.java
new file mode 100644
index 0000000000000..1755dac2ac13f
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockConstants.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.amazonbedrock;
+
+public class AmazonBedrockConstants {
+    public static final String ACCESS_KEY_FIELD = "access_key";
+    public static final String SECRET_KEY_FIELD = "secret_key";
+    public static final String REGION_FIELD = "region";
+    public static final String MODEL_FIELD = "model";
+    public static final String PROVIDER_FIELD = "provider";
+
+    public static final String TEMPERATURE_FIELD = "temperature";
+    public static final String TOP_P_FIELD = "top_p";
+    public static final String TOP_K_FIELD = "top_k";
+    public static final String MAX_NEW_TOKENS_FIELD = "max_new_tokens";
+
+    public static final Double MIN_TEMPERATURE_TOP_P_TOP_K_VALUE = 0.0;
+    public static final Double MAX_TEMPERATURE_TOP_P_TOP_K_VALUE = 1.0;
+
+    public static final int DEFAULT_MAX_CHUNK_SIZE = 2048;
+
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockModel.java
new file mode 100644
index 0000000000000..13ca8bd7bd749
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockModel.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements.
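The shared bounds in the constants file are reused by the task-settings parsers later in this patch; a hypothetical one-line guard (not part of the change) showing the intended semantics, which extractOptionalDoubleInRange performs with proper validation messages:

```java
class RangeGuardSketch {
    // hypothetical helper illustrating the temperature/top_p/top_k bounds
    static void ensureInRange(String field, Double value) {
        if (value != null
            && (value < AmazonBedrockConstants.MIN_TEMPERATURE_TOP_P_TOP_K_VALUE
                || value > AmazonBedrockConstants.MAX_TEMPERATURE_TOP_P_TOP_K_VALUE)) {
            throw new IllegalArgumentException("[" + field + "] must be between 0.0 and 1.0");
        }
    }
}
```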
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock; + +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.amazonbedrock.AmazonBedrockActionVisitor; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.util.Map; + +public abstract class AmazonBedrockModel extends Model { + + protected String region; + protected String model; + protected AmazonBedrockProvider provider; + protected RateLimitSettings rateLimitSettings; + + protected AmazonBedrockModel(ModelConfigurations modelConfigurations, ModelSecrets secrets) { + super(modelConfigurations, secrets); + setPropertiesFromServiceSettings((AmazonBedrockServiceSettings) modelConfigurations.getServiceSettings()); + } + + protected AmazonBedrockModel(Model model, TaskSettings taskSettings) { + super(model, taskSettings); + + if (model instanceof AmazonBedrockModel bedrockModel) { + setPropertiesFromServiceSettings(bedrockModel.getServiceSettings()); + } + } + + protected AmazonBedrockModel(Model model, ServiceSettings serviceSettings) { + super(model, serviceSettings); + if (serviceSettings instanceof AmazonBedrockServiceSettings bedrockServiceSettings) { + setPropertiesFromServiceSettings(bedrockServiceSettings); + } + } + + protected AmazonBedrockModel(ModelConfigurations modelConfigurations) { + super(modelConfigurations); + setPropertiesFromServiceSettings((AmazonBedrockServiceSettings) modelConfigurations.getServiceSettings()); + } + + public String region() { + return region; + } + + public String model() { + return model; + } + + public AmazonBedrockProvider provider() { + return provider; + } + + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + private void setPropertiesFromServiceSettings(AmazonBedrockServiceSettings serviceSettings) { + this.region = serviceSettings.region(); + this.model = serviceSettings.model(); + this.provider = serviceSettings.provider(); + this.rateLimitSettings = serviceSettings.rateLimitSettings(); + } + + public abstract ExecutableAction accept(AmazonBedrockActionVisitor creator, Map taskSettings); + + @Override + public AmazonBedrockServiceSettings getServiceSettings() { + return (AmazonBedrockServiceSettings) super.getServiceSettings(); + } + + @Override + public AmazonBedrockSecretSettings getSecretSettings() { + return (AmazonBedrockSecretSettings) super.getSecretSettings(); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java new file mode 100644 index 0000000000000..340a5a65f0969 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProvider.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
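AmazonBedrockModel uses the classic visitor pattern: accept() lets each concrete model bind to the create() overload for its own type at compile time. A self-contained toy version (hypothetical names, not the plugin's actual interfaces) to make the double dispatch visible:

```java
class VisitorSketch {
    interface Action {
        void run();
    }

    interface Visitor {
        Action create(Embeddings model);

        Action create(Completion model);
    }

    abstract static class BedrockModel {
        abstract Action accept(Visitor visitor);
    }

    static class Embeddings extends BedrockModel {
        @Override
        Action accept(Visitor visitor) {
            return visitor.create(this); // statically binds to create(Embeddings)
        }
    }

    static class Completion extends BedrockModel {
        @Override
        Action accept(Visitor visitor) {
            return visitor.create(this); // statically binds to create(Completion)
        }
    }
}
```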
Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.amazonbedrock;
+
+import java.util.Locale;
+
+public enum AmazonBedrockProvider {
+    AMAZONTITAN,
+    ANTHROPIC,
+    AI21LABS,
+    COHERE,
+    META,
+    MISTRAL;
+
+    public static String NAME = "amazon_bedrock_provider";
+
+    public static AmazonBedrockProvider fromString(String name) {
+        return valueOf(name.trim().toUpperCase(Locale.ROOT));
+    }
+
+    @Override
+    public String toString() {
+        return name().toLowerCase(Locale.ROOT);
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProviderCapabilities.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProviderCapabilities.java
new file mode 100644
index 0000000000000..28b10ef294bda
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockProviderCapabilities.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.amazonbedrock;
+
+import org.elasticsearch.inference.SimilarityMeasure;
+import org.elasticsearch.inference.TaskType;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.DEFAULT_MAX_CHUNK_SIZE;
+
+public final class AmazonBedrockProviderCapabilities {
+    private static final List<AmazonBedrockProvider> embeddingProviders = List.of(
+        AmazonBedrockProvider.AMAZONTITAN,
+        AmazonBedrockProvider.COHERE
+    );
+
+    private static final List<AmazonBedrockProvider> chatCompletionProviders = List.of(
+        AmazonBedrockProvider.AMAZONTITAN,
+        AmazonBedrockProvider.ANTHROPIC,
+        AmazonBedrockProvider.AI21LABS,
+        AmazonBedrockProvider.COHERE,
+        AmazonBedrockProvider.META,
+        AmazonBedrockProvider.MISTRAL
+    );
+
+    private static final List<AmazonBedrockProvider> chatCompletionProvidersWithTopK = List.of(
+        AmazonBedrockProvider.ANTHROPIC,
+        AmazonBedrockProvider.COHERE,
+        AmazonBedrockProvider.MISTRAL
+    );
+
+    private static final Map<AmazonBedrockProvider, SimilarityMeasure> embeddingsDefaultSimilarityMeasure = Map.of(
+        AmazonBedrockProvider.AMAZONTITAN,
+        SimilarityMeasure.COSINE,
+        AmazonBedrockProvider.COHERE,
+        SimilarityMeasure.DOT_PRODUCT
+    );
+
+    private static final Map<AmazonBedrockProvider, Integer> embeddingsDefaultChunkSize = Map.of(
+        AmazonBedrockProvider.AMAZONTITAN,
+        8192,
+        AmazonBedrockProvider.COHERE,
+        2048
+    );
+
+    private static final Map<AmazonBedrockProvider, Integer> embeddingsMaxBatchSize = Map.of(
+        AmazonBedrockProvider.AMAZONTITAN,
+        1,
+        AmazonBedrockProvider.COHERE,
+        96
+    );
+
+    public static boolean providerAllowsTaskType(AmazonBedrockProvider provider, TaskType taskType) {
+        switch (taskType) {
+            case COMPLETION -> {
+                return chatCompletionProviders.contains(provider);
+            }
+            case TEXT_EMBEDDING -> {
+                return embeddingProviders.contains(provider);
+            }
+            default -> {
+                return false;
+            }
+        }
+    }
+
+    public static boolean chatCompletionProviderHasTopKParameter(AmazonBedrockProvider provider) {
+        return chatCompletionProvidersWithTopK.contains(provider);
+    }
+
+    public static SimilarityMeasure getProviderDefaultSimilarityMeasure(AmazonBedrockProvider provider) {
+        if (embeddingsDefaultSimilarityMeasure.containsKey(provider)) {
+            return
embeddingsDefaultSimilarityMeasure.get(provider); + } + + return SimilarityMeasure.COSINE; + } + + public static int getEmbeddingsProviderDefaultChunkSize(AmazonBedrockProvider provider) { + if (embeddingsDefaultChunkSize.containsKey(provider)) { + return embeddingsDefaultChunkSize.get(provider); + } + + return DEFAULT_MAX_CHUNK_SIZE; + } + + public static int getEmbeddingsMaxBatchSize(AmazonBedrockProvider provider) { + if (embeddingsMaxBatchSize.containsKey(provider)) { + return embeddingsMaxBatchSize.get(provider); + } + + return 1; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java new file mode 100644 index 0000000000000..9e6328ce1c358 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SecretSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.ACCESS_KEY_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.SECRET_KEY_FIELD; + +public class AmazonBedrockSecretSettings implements SecretSettings { + public static final String NAME = "amazon_bedrock_secret_settings"; + + public final SecureString accessKey; + public final SecureString secretKey; + + public static AmazonBedrockSecretSettings fromMap(@Nullable Map map) { + if (map == null) { + return null; + } + + ValidationException validationException = new ValidationException(); + SecureString secureAccessKey = extractRequiredSecureString( + map, + ACCESS_KEY_FIELD, + ModelSecrets.SECRET_SETTINGS, + validationException + ); + SecureString secureSecretKey = extractRequiredSecureString( + map, + SECRET_KEY_FIELD, + ModelSecrets.SECRET_SETTINGS, + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AmazonBedrockSecretSettings(secureAccessKey, secureSecretKey); + } + + public AmazonBedrockSecretSettings(SecureString accessKey, SecureString secretKey) { + this.accessKey = Objects.requireNonNull(accessKey); + this.secretKey = Objects.requireNonNull(secretKey); + } + + public AmazonBedrockSecretSettings(StreamInput in) throws IOException { + this.accessKey = in.readSecureString(); + 
this.secretKey = in.readSecureString(); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeSecureString(accessKey); + out.writeSecureString(secretKey); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(ACCESS_KEY_FIELD, accessKey.toString()); + builder.field(SECRET_KEY_FIELD, secretKey.toString()); + + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AmazonBedrockSecretSettings that = (AmazonBedrockSecretSettings) object; + return Objects.equals(accessKey, that.accessKey) && Objects.equals(secretKey, that.secretKey); + } + + @Override + public int hashCode() { + return Objects.hash(accessKey, secretKey); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java new file mode 100644 index 0000000000000..dadcc8a40245e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -0,0 +1,350 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
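Both keys are required, so fromMap either returns a fully populated object or throws. A usage sketch (key values are placeholders); note the map must be mutable because extraction removes consumed entries:

```java
import org.elasticsearch.common.ValidationException;

import java.util.HashMap;
import java.util.Map;

class SecretSettingsSketch {
    static void demo() {
        Map<String, Object> secrets = new HashMap<>(Map.of("access_key", "<access>", "secret_key", "<secret>"));
        AmazonBedrockSecretSettings settings = AmazonBedrockSecretSettings.fromMap(secrets);

        try {
            AmazonBedrockSecretSettings.fromMap(new HashMap<>(Map.of("access_key", "<access>")));
        } catch (ValidationException expected) {
            // reports the missing [secret_key] setting under [secret_settings]
        }
    }
}
```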
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; +import org.elasticsearch.xpack.inference.external.action.amazonbedrock.AmazonBedrockActionCreator; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsServiceSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; +import static org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrDefaultEmpty; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_K_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProviderCapabilities.chatCompletionProviderHasTopKParameter; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProviderCapabilities.getEmbeddingsMaxBatchSize; +import static 
org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProviderCapabilities.getProviderDefaultSimilarityMeasure;
+import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProviderCapabilities.providerAllowsTaskType;
+
+public class AmazonBedrockService extends SenderService {
+    public static final String NAME = "amazonbedrock";
+
+    private final Sender amazonBedrockSender;
+
+    public AmazonBedrockService(
+        HttpRequestSender.Factory httpSenderFactory,
+        AmazonBedrockRequestSender.Factory amazonBedrockFactory,
+        ServiceComponents serviceComponents
+    ) {
+        super(httpSenderFactory, serviceComponents);
+        this.amazonBedrockSender = amazonBedrockFactory.createSender();
+    }
+
+    @Override
+    protected void doInfer(
+        Model model,
+        List<String> input,
+        Map<String, Object> taskSettings,
+        InputType inputType,
+        TimeValue timeout,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        var actionCreator = new AmazonBedrockActionCreator(amazonBedrockSender, this.getServiceComponents(), timeout);
+        if (model instanceof AmazonBedrockModel baseAmazonBedrockModel) {
+            var action = baseAmazonBedrockModel.accept(actionCreator, taskSettings);
+            action.execute(new DocumentsOnlyInput(input), timeout, listener);
+        } else {
+            listener.onFailure(createInvalidModelException(model));
+        }
+    }
+
+    @Override
+    protected void doInfer(
+        Model model,
+        String query,
+        List<String> input,
+        Map<String, Object> taskSettings,
+        InputType inputType,
+        TimeValue timeout,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        throw new UnsupportedOperationException("Amazon Bedrock service does not support inference with query input");
+    }
+
+    @Override
+    protected void doChunkedInfer(
+        Model model,
+        String query,
+        List<String> input,
+        Map<String, Object> taskSettings,
+        InputType inputType,
+        ChunkingOptions chunkingOptions,
+        TimeValue timeout,
+        ActionListener<List<ChunkedInferenceServiceResults>> listener
+    ) {
+        ActionListener<InferenceServiceResults> inferListener = listener.delegateFailureAndWrap(
+            (delegate, response) -> delegate.onResponse(translateToChunkedResults(input, response))
+        );
+
+        var actionCreator = new AmazonBedrockActionCreator(amazonBedrockSender, this.getServiceComponents(), timeout);
+        if (model instanceof AmazonBedrockModel baseAmazonBedrockModel) {
+            var maxBatchSize = getEmbeddingsMaxBatchSize(baseAmazonBedrockModel.provider());
+            var batchedRequests = new EmbeddingRequestChunker(input, maxBatchSize, EmbeddingRequestChunker.EmbeddingType.FLOAT)
+                .batchRequestsWithListeners(listener);
+            for (var request : batchedRequests) {
+                var action = baseAmazonBedrockModel.accept(actionCreator, taskSettings);
+                action.execute(new DocumentsOnlyInput(request.batch().inputs()), timeout, inferListener);
+            }
+        } else {
+            listener.onFailure(createInvalidModelException(model));
+        }
+    }
+
+    private static List<ChunkedInferenceServiceResults> translateToChunkedResults(
+        List<String> inputs,
+        InferenceServiceResults inferenceResults
+    ) {
+        if (inferenceResults instanceof InferenceTextEmbeddingFloatResults textEmbeddingResults) {
+            return InferenceChunkedTextEmbeddingFloatResults.listOf(inputs, textEmbeddingResults);
+        } else if (inferenceResults instanceof ErrorInferenceResults error) {
+            return List.of(new ErrorChunkedInferenceResults(error.getException()));
+        } else {
+            throw createInvalidChunkedResultException(InferenceTextEmbeddingFloatResults.NAME, inferenceResults.getWriteableName());
+        }
+    }
+
+    @Override
+    public String name() {
+        return NAME;
+    }
+
+    @Override
+    public void parseRequestConfig(
+        String modelId,
+        TaskType taskType,
+        Map<String, Object> config,
+        Set<String> platformArchitectures,
+        ActionListener<Model> parsedModelListener
+    ) {
+        try {
+            Map<String, Object> serviceSettingsMap =
removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + + AmazonBedrockModel model = createModel( + modelId, + taskType, + serviceSettingsMap, + taskSettingsMap, + serviceSettingsMap, + TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), + ConfigurationParseContext.REQUEST + ); + + throwIfNotEmptyMap(config, NAME); + throwIfNotEmptyMap(serviceSettingsMap, NAME); + throwIfNotEmptyMap(taskSettingsMap, NAME); + + parsedModelListener.onResponse(model); + } catch (Exception e) { + parsedModelListener.onFailure(e); + } + } + + @Override + public Model parsePersistedConfigWithSecrets( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); + + return createModel( + modelId, + taskType, + serviceSettingsMap, + taskSettingsMap, + secretSettingsMap, + parsePersistedConfigErrorMsg(modelId, NAME), + ConfigurationParseContext.PERSISTENT + ); + } + + @Override + public Model parsePersistedConfig(String modelId, TaskType taskType, Map config) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + + return createModel( + modelId, + taskType, + serviceSettingsMap, + taskSettingsMap, + null, + parsePersistedConfigErrorMsg(modelId, NAME), + ConfigurationParseContext.PERSISTENT + ); + } + + private static AmazonBedrockModel createModel( + String inferenceEntityId, + TaskType taskType, + Map serviceSettings, + Map taskSettings, + @Nullable Map secretSettings, + String failureMessage, + ConfigurationParseContext context + ) { + switch (taskType) { + case TEXT_EMBEDDING -> { + var model = new AmazonBedrockEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); + checkProviderForTask(TaskType.TEXT_EMBEDDING, model.provider()); + return model; + } + case COMPLETION -> { + var model = new AmazonBedrockChatCompletionModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); + checkProviderForTask(TaskType.COMPLETION, model.provider()); + checkChatCompletionProviderForTopKParameter(model); + return model; + } + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); + } + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + } + + /** + * For text embedding models get the embedding size and + * update the service settings. 
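createModel guards each task type with checkProviderForTask, so a provider without the requested capability is rejected at registration time. A sketch of the rejection path (region and model identifiers are illustrative):

```java
import java.util.HashMap;
import java.util.Map;

class ProviderTaskMismatchSketch {
    static Map<String, Object> serviceSettings = new HashMap<>(
        Map.of("region", "us-east-1", "model", "anthropic.claude-v2", "provider", "anthropic")
    );
    // Registering this as TEXT_EMBEDDING reaches checkProviderForTask(TEXT_EMBEDDING, ANTHROPIC),
    // which throws: "The [text_embedding] task type for provider [anthropic] is not available"
}
```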
+ * + * @param model The new model + * @param listener The listener + */ + @Override + public void checkModelConfig(Model model, ActionListener listener) { + if (model instanceof AmazonBedrockEmbeddingsModel embeddingsModel) { + ServiceUtils.getEmbeddingSize( + model, + this, + listener.delegateFailureAndWrap((l, size) -> l.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size))) + ); + } else { + listener.onResponse(model); + } + } + + private AmazonBedrockEmbeddingsModel updateModelWithEmbeddingDetails(AmazonBedrockEmbeddingsModel model, int embeddingSize) { + AmazonBedrockEmbeddingsServiceSettings serviceSettings = model.getServiceSettings(); + if (serviceSettings.dimensionsSetByUser() + && serviceSettings.dimensions() != null + && serviceSettings.dimensions() != embeddingSize) { + throw new ElasticsearchStatusException( + Strings.format( + "The retrieved embeddings size [%s] does not match the size specified in the settings [%s]. " + + "Please recreate the [%s] configuration with the correct dimensions", + embeddingSize, + serviceSettings.dimensions(), + model.getConfigurations().getInferenceEntityId() + ), + RestStatus.BAD_REQUEST + ); + } + + var similarityFromModel = serviceSettings.similarity(); + var similarityToUse = similarityFromModel == null ? getProviderDefaultSimilarityMeasure(model.provider()) : similarityFromModel; + + AmazonBedrockEmbeddingsServiceSettings settingsToUse = new AmazonBedrockEmbeddingsServiceSettings( + serviceSettings.region(), + serviceSettings.model(), + serviceSettings.provider(), + embeddingSize, + serviceSettings.dimensionsSetByUser(), + serviceSettings.maxInputTokens(), + similarityToUse, + serviceSettings.rateLimitSettings() + ); + + return new AmazonBedrockEmbeddingsModel(model, settingsToUse); + } + + private static void checkProviderForTask(TaskType taskType, AmazonBedrockProvider provider) { + if (providerAllowsTaskType(provider, taskType) == false) { + throw new ElasticsearchStatusException( + Strings.format("The [%s] task type for provider [%s] is not available", taskType, provider), + RestStatus.BAD_REQUEST + ); + } + } + + private static void checkChatCompletionProviderForTopKParameter(AmazonBedrockChatCompletionModel model) { + var taskSettings = model.getTaskSettings(); + if (taskSettings.topK() != null) { + if (chatCompletionProviderHasTopKParameter(model.provider()) == false) { + throw new ElasticsearchStatusException( + Strings.format("The [%s] task parameter is not available for provider [%s]", TOP_K_FIELD, model.provider()), + RestStatus.BAD_REQUEST + ); + } + } + } + + @Override + public void close() throws IOException { + super.close(); + IOUtils.closeWhileHandlingException(amazonBedrockSender); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java new file mode 100644 index 0000000000000..13c7c0a8c5938 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceSettings.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
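The embedding-size handshake in updateModelWithEmbeddingDetails boils down to one rule; a hypothetical condensation (not the plugin's code) for reference:

```java
class DimensionsRuleSketch {
    // hypothetical distillation of updateModelWithEmbeddingDetails
    static int resolveDimensions(boolean dimensionsSetByUser, Integer configured, int probed) {
        if (dimensionsSetByUser && configured != null && configured != probed) {
            throw new IllegalStateException("configured dimensions [" + configured + "] do not match probed size [" + probed + "]");
        }
        return probed; // the probed size is what gets persisted
    }
}
```

When no similarity measure was configured, the same method falls back to the provider default (COSINE for Titan, DOT_PRODUCT for Cohere) before rebuilding the settings.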
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.EnumSet; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredEnum; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MODEL_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.PROVIDER_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.REGION_FIELD; + +public abstract class AmazonBedrockServiceSettings extends FilteredXContentObject implements ServiceSettings { + + protected static final String AMAZON_BEDROCK_BASE_NAME = "amazon_bedrock"; + + protected final String region; + protected final String model; + protected final AmazonBedrockProvider provider; + protected final RateLimitSettings rateLimitSettings; + + // the default requests per minute are defined as per-model in the "Runtime quotas" on AWS + // see: https://docs.aws.amazon.com/bedrock/latest/userguide/quotas.html + // setting this to 240 requests per minute (4 requests / sec) is a sane default for us as it should be enough for + // decent throughput without exceeding the minimal for _most_ items. The user should consult + // the table above if using a model that might have a lesser limit (e.g. 
Anthropic Claude 3.5) + protected static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(240); + + protected static AmazonBedrockServiceSettings.BaseAmazonBedrockCommonSettings fromMap( + Map map, + ValidationException validationException, + ConfigurationParseContext context + ) { + String model = extractRequiredString(map, MODEL_FIELD, ModelConfigurations.SERVICE_SETTINGS, validationException); + String region = extractRequiredString(map, REGION_FIELD, ModelConfigurations.SERVICE_SETTINGS, validationException); + AmazonBedrockProvider provider = extractRequiredEnum( + map, + PROVIDER_FIELD, + ModelConfigurations.SERVICE_SETTINGS, + AmazonBedrockProvider::fromString, + EnumSet.allOf(AmazonBedrockProvider.class), + validationException + ); + RateLimitSettings rateLimitSettings = RateLimitSettings.of( + map, + DEFAULT_RATE_LIMIT_SETTINGS, + validationException, + AMAZON_BEDROCK_BASE_NAME, + context + ); + + return new BaseAmazonBedrockCommonSettings(region, model, provider, rateLimitSettings); + } + + protected record BaseAmazonBedrockCommonSettings( + String region, + String model, + AmazonBedrockProvider provider, + @Nullable RateLimitSettings rateLimitSettings + ) {} + + protected AmazonBedrockServiceSettings(StreamInput in) throws IOException { + this.region = in.readString(); + this.model = in.readString(); + this.provider = in.readEnum(AmazonBedrockProvider.class); + this.rateLimitSettings = new RateLimitSettings(in); + } + + protected AmazonBedrockServiceSettings( + String region, + String model, + AmazonBedrockProvider provider, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.region = Objects.requireNonNull(region); + this.model = Objects.requireNonNull(model); + this.provider = Objects.requireNonNull(provider); + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + } + + public String region() { + return region; + } + + public String model() { + return model; + } + + public AmazonBedrockProvider provider() { + return provider; + } + + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(region); + out.writeString(model); + out.writeEnum(provider); + rateLimitSettings.writeTo(out); + } + + public void addBaseXContent(XContentBuilder builder, Params params) throws IOException { + toXContentFragmentOfExposedFields(builder, params); + } + + protected void addXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + builder.field(REGION_FIELD, region); + builder.field(MODEL_FIELD, model); + builder.field(PROVIDER_FIELD, provider.name()); + rateLimitSettings.toXContent(builder, params); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModel.java new file mode 100644 index 0000000000000..27dc607d671aa --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModel.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
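Putting the base parser together, the minimal service_settings map looks like the sketch below (values illustrative); the rate limit is optional and falls back to the 240 requests-per-minute default discussed above:

```java
import java.util.HashMap;
import java.util.Map;

class BaseServiceSettingsSketch {
    static Map<String, Object> minimal = new HashMap<>(
        Map.of("region", "us-west-2", "model", "amazon.titan-embed-text-v1", "provider", "amazontitan")
    );
    // fromMap(minimal, validationException, context) yields a BaseAmazonBedrockCommonSettings
    // with provider AMAZONTITAN and rateLimitSettings defaulting to 240 requests/minute
}
```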
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.amazonbedrock.AmazonBedrockActionVisitor; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockSecretSettings; + +import java.util.Map; + +public class AmazonBedrockChatCompletionModel extends AmazonBedrockModel { + + public static AmazonBedrockChatCompletionModel of(AmazonBedrockChatCompletionModel completionModel, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return completionModel; + } + + var requestTaskSettings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap(taskSettings); + var taskSettingsToUse = AmazonBedrockChatCompletionTaskSettings.of(completionModel.getTaskSettings(), requestTaskSettings); + return new AmazonBedrockChatCompletionModel(completionModel, taskSettingsToUse); + } + + public AmazonBedrockChatCompletionModel( + String inferenceEntityId, + TaskType taskType, + String name, + Map serviceSettings, + Map taskSettings, + Map secretSettings, + ConfigurationParseContext context + ) { + this( + inferenceEntityId, + taskType, + name, + AmazonBedrockChatCompletionServiceSettings.fromMap(serviceSettings, context), + AmazonBedrockChatCompletionTaskSettings.fromMap(taskSettings), + AmazonBedrockSecretSettings.fromMap(secretSettings) + ); + } + + public AmazonBedrockChatCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + AmazonBedrockChatCompletionServiceSettings serviceSettings, + AmazonBedrockChatCompletionTaskSettings taskSettings, + AmazonBedrockSecretSettings secrets + ) { + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); + } + + public AmazonBedrockChatCompletionModel(Model model, TaskSettings taskSettings) { + super(model, taskSettings); + } + + @Override + public ExecutableAction accept(AmazonBedrockActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } + + @Override + public AmazonBedrockChatCompletionServiceSettings getServiceSettings() { + return (AmazonBedrockChatCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public AmazonBedrockChatCompletionTaskSettings getTaskSettings() { + return (AmazonBedrockChatCompletionTaskSettings) super.getTaskSettings(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettings.java new file mode 100644 index 0000000000000..5985dcd56c5d2 --- /dev/null +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettings.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalDoubleInRange; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MAX_NEW_TOKENS_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MAX_TEMPERATURE_TOP_P_TOP_K_VALUE; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MIN_TEMPERATURE_TOP_P_TOP_K_VALUE; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TEMPERATURE_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_K_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_P_FIELD; + +public record AmazonBedrockChatCompletionRequestTaskSettings( + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Double topK, + @Nullable Integer maxNewTokens +) { + + public static final AmazonBedrockChatCompletionRequestTaskSettings EMPTY_SETTINGS = new AmazonBedrockChatCompletionRequestTaskSettings( + null, + null, + null, + null + ); + + /** + * Extracts the task settings from a map. All settings are considered optional and the absence of a setting + * does not throw an error. 
+     *
+     * @param map the settings received from a request
+     * @return an {@link AmazonBedrockChatCompletionRequestTaskSettings}
+     */
+    public static AmazonBedrockChatCompletionRequestTaskSettings fromMap(Map<String, Object> map) {
+        if (map.isEmpty()) {
+            return AmazonBedrockChatCompletionRequestTaskSettings.EMPTY_SETTINGS;
+        }
+
+        ValidationException validationException = new ValidationException();
+
+        var temperature = extractOptionalDoubleInRange(
+            map,
+            TEMPERATURE_FIELD,
+            MIN_TEMPERATURE_TOP_P_TOP_K_VALUE,
+            MAX_TEMPERATURE_TOP_P_TOP_K_VALUE,
+            ModelConfigurations.TASK_SETTINGS,
+            validationException
+        );
+        var topP = extractOptionalDoubleInRange(
+            map,
+            TOP_P_FIELD,
+            MIN_TEMPERATURE_TOP_P_TOP_K_VALUE,
+            MAX_TEMPERATURE_TOP_P_TOP_K_VALUE,
+            ModelConfigurations.TASK_SETTINGS,
+            validationException
+        );
+        var topK = extractOptionalDoubleInRange(
+            map,
+            TOP_K_FIELD,
+            MIN_TEMPERATURE_TOP_P_TOP_K_VALUE,
+            MAX_TEMPERATURE_TOP_P_TOP_K_VALUE,
+            ModelConfigurations.TASK_SETTINGS,
+            validationException
+        );
+        Integer maxNewTokens = extractOptionalPositiveInteger(
+            map,
+            MAX_NEW_TOKENS_FIELD,
+            ModelConfigurations.TASK_SETTINGS,
+            validationException
+        );
+
+        if (validationException.validationErrors().isEmpty() == false) {
+            throw validationException;
+        }
+
+        return new AmazonBedrockChatCompletionRequestTaskSettings(temperature, topP, topK, maxNewTokens);
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettings.java
new file mode 100644
index 0000000000000..fc3d09c6eea7a
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettings.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class AmazonBedrockChatCompletionServiceSettings extends AmazonBedrockServiceSettings { + public static final String NAME = "amazon_bedrock_chat_completion_service_settings"; + + public static AmazonBedrockChatCompletionServiceSettings fromMap( + Map serviceSettings, + ConfigurationParseContext context + ) { + ValidationException validationException = new ValidationException(); + + var baseSettings = AmazonBedrockServiceSettings.fromMap(serviceSettings, validationException, context); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AmazonBedrockChatCompletionServiceSettings( + baseSettings.region(), + baseSettings.model(), + baseSettings.provider(), + baseSettings.rateLimitSettings() + ); + } + + public AmazonBedrockChatCompletionServiceSettings( + String region, + String model, + AmazonBedrockProvider provider, + RateLimitSettings rateLimitSettings + ) { + super(region, model, provider, rateLimitSettings); + } + + public AmazonBedrockChatCompletionServiceSettings(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + super.addBaseXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + super.addXContentFragmentOfExposedFields(builder, params); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AmazonBedrockChatCompletionServiceSettings that = (AmazonBedrockChatCompletionServiceSettings) o; + + return Objects.equals(region, that.region) + && Objects.equals(provider, that.provider) + && Objects.equals(model, that.model) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(region, model, provider, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java new file mode 100644 index 0000000000000..e689e68794e1f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettings.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.TransportVersions.ML_INFERENCE_AMAZON_BEDROCK_ADDED; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalDoubleInRange; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MAX_NEW_TOKENS_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MAX_TEMPERATURE_TOP_P_TOP_K_VALUE; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MIN_TEMPERATURE_TOP_P_TOP_K_VALUE; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TEMPERATURE_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_K_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_P_FIELD; + +public class AmazonBedrockChatCompletionTaskSettings implements TaskSettings { + public static final String NAME = "amazon_bedrock_chat_completion_task_settings"; + + public static final AmazonBedrockChatCompletionRequestTaskSettings EMPTY_SETTINGS = new AmazonBedrockChatCompletionRequestTaskSettings( + null, + null, + null, + null + ); + + public static AmazonBedrockChatCompletionTaskSettings fromMap(Map settings) { + ValidationException validationException = new ValidationException(); + + Double temperature = extractOptionalDoubleInRange( + settings, + TEMPERATURE_FIELD, + MIN_TEMPERATURE_TOP_P_TOP_K_VALUE, + MAX_TEMPERATURE_TOP_P_TOP_K_VALUE, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Double topP = extractOptionalDoubleInRange( + settings, + TOP_P_FIELD, + MIN_TEMPERATURE_TOP_P_TOP_K_VALUE, + MAX_TEMPERATURE_TOP_P_TOP_K_VALUE, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Double topK = extractOptionalDoubleInRange( + settings, + TOP_K_FIELD, + MIN_TEMPERATURE_TOP_P_TOP_K_VALUE, + MAX_TEMPERATURE_TOP_P_TOP_K_VALUE, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + Integer maxNewTokens = extractOptionalPositiveInteger( + settings, + MAX_NEW_TOKENS_FIELD, + ModelConfigurations.TASK_SETTINGS, + validationException + ); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AmazonBedrockChatCompletionTaskSettings(temperature, topP, topK, maxNewTokens); + } + + public static AmazonBedrockChatCompletionTaskSettings of( + AmazonBedrockChatCompletionTaskSettings originalSettings, + AmazonBedrockChatCompletionRequestTaskSettings requestSettings + ) { + var temperature = requestSettings.temperature() == null ? 
originalSettings.temperature() : requestSettings.temperature(); + var topP = requestSettings.topP() == null ? originalSettings.topP() : requestSettings.topP(); + var topK = requestSettings.topK() == null ? originalSettings.topK() : requestSettings.topK(); + var maxNewTokens = requestSettings.maxNewTokens() == null ? originalSettings.maxNewTokens() : requestSettings.maxNewTokens(); + + return new AmazonBedrockChatCompletionTaskSettings(temperature, topP, topK, maxNewTokens); + } + + private final Double temperature; + private final Double topP; + private final Double topK; + private final Integer maxNewTokens; + + public AmazonBedrockChatCompletionTaskSettings( + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Double topK, + @Nullable Integer maxNewTokens + ) { + this.temperature = temperature; + this.topP = topP; + this.topK = topK; + this.maxNewTokens = maxNewTokens; + } + + public AmazonBedrockChatCompletionTaskSettings(StreamInput in) throws IOException { + this.temperature = in.readOptionalDouble(); + this.topP = in.readOptionalDouble(); + this.topK = in.readOptionalDouble(); + this.maxNewTokens = in.readOptionalVInt(); + } + + public Double temperature() { + return temperature; + } + + public Double topP() { + return topP; + } + + public Double topK() { + return topK; + } + + public Integer maxNewTokens() { + return maxNewTokens; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return ML_INFERENCE_AMAZON_BEDROCK_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalDouble(temperature); + out.writeOptionalDouble(topP); + out.writeOptionalDouble(topK); + out.writeOptionalVInt(maxNewTokens); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (temperature != null) { + builder.field(TEMPERATURE_FIELD, temperature); + } + if (topP != null) { + builder.field(TOP_P_FIELD, topP); + } + if (topK != null) { + builder.field(TOP_K_FIELD, topK); + } + if (maxNewTokens != null) { + builder.field(MAX_NEW_TOKENS_FIELD, maxNewTokens); + } + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AmazonBedrockChatCompletionTaskSettings that = (AmazonBedrockChatCompletionTaskSettings) o; + return Objects.equals(temperature, that.temperature) + && Objects.equals(topP, that.topP) + && Objects.equals(topK, that.topK) + && Objects.equals(maxNewTokens, that.maxNewTokens); + } + + @Override + public int hashCode() { + return Objects.hash(temperature, topP, topK, maxNewTokens); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java new file mode 100644 index 0000000000000..0e3a954a03279 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings;
+
+import org.elasticsearch.common.ValidationException;
+import org.elasticsearch.inference.EmptyTaskSettings;
+import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
+import org.elasticsearch.inference.ModelSecrets;
+import org.elasticsearch.inference.ServiceSettings;
+import org.elasticsearch.inference.TaskSettings;
+import org.elasticsearch.inference.TaskType;
+import org.elasticsearch.xpack.inference.external.action.ExecutableAction;
+import org.elasticsearch.xpack.inference.external.action.amazonbedrock.AmazonBedrockActionVisitor;
+import org.elasticsearch.xpack.inference.services.ConfigurationParseContext;
+import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel;
+import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockSecretSettings;
+
+import java.util.Map;
+
+public class AmazonBedrockEmbeddingsModel extends AmazonBedrockModel {
+
+    public static AmazonBedrockEmbeddingsModel of(AmazonBedrockEmbeddingsModel embeddingsModel, Map<String, Object> taskSettings) {
+        if (taskSettings != null && taskSettings.isEmpty() == false) {
+            // no task settings allowed
+            var validationException = new ValidationException();
+            validationException.addValidationError("Amazon Bedrock embeddings model cannot have task settings");
+            throw validationException;
+        }
+
+        return embeddingsModel;
+    }
+
+    public AmazonBedrockEmbeddingsModel(
+        String inferenceEntityId,
+        TaskType taskType,
+        String service,
+        Map<String, Object> serviceSettings,
+        Map<String, Object> taskSettings,
+        Map<String, Object> secretSettings,
+        ConfigurationParseContext context
+    ) {
+        this(
+            inferenceEntityId,
+            taskType,
+            service,
+            AmazonBedrockEmbeddingsServiceSettings.fromMap(serviceSettings, context),
+            new EmptyTaskSettings(),
+            AmazonBedrockSecretSettings.fromMap(secretSettings)
+        );
+    }
+
+    public AmazonBedrockEmbeddingsModel(
+        String inferenceEntityId,
+        TaskType taskType,
+        String service,
+        AmazonBedrockEmbeddingsServiceSettings serviceSettings,
+        TaskSettings taskSettings,
+        AmazonBedrockSecretSettings secrets
+    ) {
+        super(
+            new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, new EmptyTaskSettings()),
+            new ModelSecrets(secrets)
+        );
+    }
+
+    public AmazonBedrockEmbeddingsModel(Model model, ServiceSettings serviceSettings) {
+        super(model, serviceSettings);
+    }
+
+    @Override
+    public ExecutableAction accept(AmazonBedrockActionVisitor creator, Map<String, Object> taskSettings) {
+        return creator.create(this, taskSettings);
+    }
+
+    @Override
+    public AmazonBedrockEmbeddingsServiceSettings getServiceSettings() {
+        return (AmazonBedrockEmbeddingsServiceSettings) super.getServiceSettings();
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettings.java
new file mode 100644
index 0000000000000..4bf037558c618
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettings.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements.
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; + +public class AmazonBedrockEmbeddingsServiceSettings extends AmazonBedrockServiceSettings { + public static final String NAME = "amazon_bedrock_embeddings_service_settings"; + static final String DIMENSIONS_SET_BY_USER = "dimensions_set_by_user"; + + private final Integer dimensions; + private final Boolean dimensionsSetByUser; + private final Integer maxInputTokens; + private final SimilarityMeasure similarity; + + public static AmazonBedrockEmbeddingsServiceSettings fromMap(Map map, ConfigurationParseContext context) { + ValidationException validationException = new ValidationException(); + + var settings = embeddingSettingsFromMap(map, validationException, context); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return settings; + } + + private static AmazonBedrockEmbeddingsServiceSettings embeddingSettingsFromMap( + Map map, + ValidationException validationException, + ConfigurationParseContext context + ) { + var baseSettings = AmazonBedrockServiceSettings.fromMap(map, validationException, context); + SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); + + Integer maxTokens = extractOptionalPositiveInteger( + map, + MAX_INPUT_TOKENS, + ModelConfigurations.SERVICE_SETTINGS, + validationException + ); + Integer dims = extractOptionalPositiveInteger(map, DIMENSIONS, ModelConfigurations.SERVICE_SETTINGS, validationException); + + Boolean dimensionsSetByUser = extractOptionalBoolean(map, DIMENSIONS_SET_BY_USER, validationException); + + switch (context) { + case REQUEST -> { + if (dimensionsSetByUser != null) { + validationException.addValidationError( + ServiceUtils.invalidSettingError(DIMENSIONS_SET_BY_USER, ModelConfigurations.SERVICE_SETTINGS) + ); + } + + if (dims != null) { + 
validationException.addValidationError( + ServiceUtils.invalidSettingError(DIMENSIONS, ModelConfigurations.SERVICE_SETTINGS) + ); + } + dimensionsSetByUser = false; + } + case PERSISTENT -> { + if (dimensionsSetByUser == null) { + validationException.addValidationError( + ServiceUtils.missingSettingErrorMsg(DIMENSIONS_SET_BY_USER, ModelConfigurations.SERVICE_SETTINGS) + ); + } + } + } + return new AmazonBedrockEmbeddingsServiceSettings( + baseSettings.region(), + baseSettings.model(), + baseSettings.provider(), + dims, + dimensionsSetByUser, + maxTokens, + similarity, + baseSettings.rateLimitSettings() + ); + } + + public AmazonBedrockEmbeddingsServiceSettings(StreamInput in) throws IOException { + super(in); + dimensions = in.readOptionalVInt(); + dimensionsSetByUser = in.readBoolean(); + maxInputTokens = in.readOptionalVInt(); + similarity = in.readOptionalEnum(SimilarityMeasure.class); + } + + public AmazonBedrockEmbeddingsServiceSettings( + String region, + String model, + AmazonBedrockProvider provider, + @Nullable Integer dimensions, + Boolean dimensionsSetByUser, + @Nullable Integer maxInputTokens, + @Nullable SimilarityMeasure similarity, + RateLimitSettings rateLimitSettings + ) { + super(region, model, provider, rateLimitSettings); + this.dimensions = dimensions; + this.dimensionsSetByUser = dimensionsSetByUser; + this.maxInputTokens = maxInputTokens; + this.similarity = similarity; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalVInt(dimensions); + out.writeBoolean(dimensionsSetByUser); + out.writeOptionalVInt(maxInputTokens); + out.writeOptionalEnum(similarity); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + super.addBaseXContent(builder, params); + builder.field(DIMENSIONS_SET_BY_USER, dimensionsSetByUser); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException { + super.addXContentFragmentOfExposedFields(builder, params); + + if (dimensions != null) { + builder.field(DIMENSIONS, dimensions); + } + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } + if (similarity != null) { + builder.field(SIMILARITY, similarity); + } + + return builder; + } + + @Override + public SimilarityMeasure similarity() { + return similarity; + } + + @Override + public Integer dimensions() { + return dimensions; + } + + public boolean dimensionsSetByUser() { + return this.dimensionsSetByUser; + } + + public Integer maxInputTokens() { + return maxInputTokens; + } + + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return DenseVectorFieldMapper.ElementType.FLOAT; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AmazonBedrockEmbeddingsServiceSettings that = (AmazonBedrockEmbeddingsServiceSettings) o; + + return Objects.equals(region, that.region) + && Objects.equals(provider, that.provider) + && Objects.equals(model, that.model) + && Objects.equals(dimensions, that.dimensions) + && Objects.equals(dimensionsSetByUser, that.dimensionsSetByUser) + && Objects.equals(maxInputTokens, that.maxInputTokens) + && Objects.equals(similarity, that.similarity) + && Objects.equals(rateLimitSettings, 
that.rateLimitSettings);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(region, model, provider, dimensions, dimensionsSetByUser, maxInputTokens, similarity, rateLimitSettings);
+    }
+
+}
diff --git a/x-pack/plugin/inference/src/main/plugin-metadata/plugin-security.policy b/x-pack/plugin/inference/src/main/plugin-metadata/plugin-security.policy
index f21a46521a7f7..a39fcf53be7f3 100644
--- a/x-pack/plugin/inference/src/main/plugin-metadata/plugin-security.policy
+++ b/x-pack/plugin/inference/src/main/plugin-metadata/plugin-security.policy
@@ -8,12 +8,18 @@ grant {
   // required by: com.google.api.client.json.JsonParser#parseValue
+  // also required by AWS SDK for client configuration
   permission java.lang.RuntimePermission "accessDeclaredMembers";
+  permission java.lang.RuntimePermission "getClassLoader";
+
   // required by: com.google.api.client.json.GenericJson#
+  // also by AWS SDK for Jackson's ObjectMapper
   permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
+
   // required to add google certs to the gcs client truststore
   permission java.lang.RuntimePermission "setFactory";

   // gcs client opens socket connections to access the repository
-  permission java.net.SocketPermission "*", "connect";
+  // also, the AWS Bedrock client opens socket connections and needs the resolve permission to access resources
+  permission java.net.SocketPermission "*", "connect,resolve";
 };
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreatorTests.java
new file mode 100644
index 0000000000000..87d3a82b4aae6
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/amazonbedrock/AmazonBedrockActionCreatorTests.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.action.amazonbedrock; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockMockRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.services.ServiceComponentsTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModelTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModelTests; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockActionCreatorTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testEmbeddingsRequestAction() throws IOException { + var serviceComponents = ServiceComponentsTests.createWithEmptySettings(threadPool); + var mockedFloatResults = List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.0123F, -0.0123F })); + var mockedResult = new InferenceTextEmbeddingFloatResults(mockedFloatResults); + try (var sender = new AmazonBedrockMockRequestSender()) { + sender.enqueue(mockedResult); + var creator = new AmazonBedrockActionCreator(sender, serviceComponents, TIMEOUT); + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "test_id", + "test_region", + "test_model", + AmazonBedrockProvider.AMAZONTITAN, + null, + false, + null, + null, + null, + "accesskey", + "secretkey" + ); + var action = creator.create(model, Map.of()); + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectationFloat(List.of(new float[] { 0.0123F, -0.0123F })))); + + assertThat(sender.sendCount(), is(1)); + var sentInputs = sender.getInputs(); + assertThat(sentInputs.size(), is(1)); + assertThat(sentInputs.get(0), is("abc")); + } + } + + public void testEmbeddingsRequestAction_HandlesException() throws IOException { + var serviceComponents = ServiceComponentsTests.createWithEmptySettings(threadPool); 
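+ // the mock sender replays whatever is enqueued; enqueueing an exception instead of an inference
+ // result makes it call the listener's onFailure, which the expectThrows below asserts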
+ var mockedResult = new ElasticsearchException("mock exception"); + try (var sender = new AmazonBedrockMockRequestSender()) { + sender.enqueue(mockedResult); + var creator = new AmazonBedrockActionCreator(sender, serviceComponents, TIMEOUT); + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "test_id", + "test_region", + "test_model", + AmazonBedrockProvider.AMAZONTITAN, + "accesskey", + "secretkey" + ); + var action = creator.create(model, Map.of()); + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(sender.sendCount(), is(1)); + assertThat(sender.getInputs().size(), is(1)); + assertThat(thrownException.getMessage(), is("mock exception")); + } + } + + public void testCompletionRequestAction() throws IOException { + var serviceComponents = ServiceComponentsTests.createWithEmptySettings(threadPool); + var mockedChatCompletionResults = List.of(new ChatCompletionResults.Result("test input string")); + var mockedResult = new ChatCompletionResults(mockedChatCompletionResults); + try (var sender = new AmazonBedrockMockRequestSender()) { + sender.enqueue(mockedResult); + var creator = new AmazonBedrockActionCreator(sender, serviceComponents, TIMEOUT); + var model = AmazonBedrockChatCompletionModelTests.createModel( + "test_id", + "test_region", + "test_model", + AmazonBedrockProvider.AMAZONTITAN, + null, + null, + null, + null, + null, + "accesskey", + "secretkey" + ); + var action = creator.create(model, Map.of()); + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectationCompletion(List.of("test input string")))); + + assertThat(sender.sendCount(), is(1)); + var sentInputs = sender.getInputs(); + assertThat(sentInputs.size(), is(1)); + assertThat(sentInputs.get(0), is("abc")); + } + } + + public void testChatCompletionRequestAction_HandlesException() throws IOException { + var serviceComponents = ServiceComponentsTests.createWithEmptySettings(threadPool); + var mockedResult = new ElasticsearchException("mock exception"); + try (var sender = new AmazonBedrockMockRequestSender()) { + sender.enqueue(mockedResult); + var creator = new AmazonBedrockActionCreator(sender, serviceComponents, TIMEOUT); + var model = AmazonBedrockChatCompletionModelTests.createModel( + "test_id", + "test_region", + "test_model", + AmazonBedrockProvider.AMAZONTITAN, + null, + null, + null, + null, + null, + "accesskey", + "secretkey" + ); + var action = creator.create(model, Map.of()); + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(sender.sendCount(), is(1)); + assertThat(sender.getInputs().size(), is(1)); + assertThat(thrownException.getMessage(), is("mock exception")); + } + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java 
new file mode 100644 index 0000000000000..9326d39cb657c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockExecutorTests.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import com.amazonaws.services.bedrockruntime.model.ContentBlock; +import com.amazonaws.services.bedrockruntime.model.ConverseOutput; +import com.amazonaws.services.bedrockruntime.model.ConverseResult; +import com.amazonaws.services.bedrockruntime.model.InvokeModelResult; +import com.amazonaws.services.bedrockruntime.model.Message; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockChatCompletionRequest; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockTitanCompletionRequestEntity; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockEmbeddingsRequest; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings.AmazonBedrockTitanEmbeddingsRequestEntity; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.completion.AmazonBedrockChatCompletionResponseHandler; +import org.elasticsearch.xpack.inference.external.response.amazonbedrock.embeddings.AmazonBedrockEmbeddingsResponseHandler; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModelTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModelTests; + +import java.nio.CharBuffer; +import java.nio.charset.CharacterCodingException; +import java.nio.charset.Charset; +import java.util.List; + +import static org.elasticsearch.xpack.inference.common.TruncatorTests.createTruncator; +import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockExecutorTests extends ESTestCase { + public void testExecute_EmbeddingsRequest_ForAmazonTitan() throws CharacterCodingException { + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "accesskey", + "secretkey" + ); + var truncator = createTruncator(); + var truncatedInput = truncator.truncate(List.of("abc")); + var requestEntity = new AmazonBedrockTitanEmbeddingsRequestEntity("abc"); + var request = new AmazonBedrockEmbeddingsRequest(truncator, truncatedInput, model, requestEntity, null); + var responseHandler = new AmazonBedrockEmbeddingsResponseHandler(); + + var clientCache = new AmazonBedrockMockClientCache(null, getTestInvokeResult(TEST_AMAZON_TITAN_EMBEDDINGS_RESULT), null); + var listener 
= new PlainActionFuture(); + + var executor = new AmazonBedrockEmbeddingsExecutor(request, responseHandler, logger, () -> false, listener, clientCache); + executor.run(); + var result = listener.actionGet(new TimeValue(30000)); + assertNotNull(result); + assertThat(result.asMap(), is(buildExpectationFloat(List.of(new float[] { 0.123F, 0.456F, 0.678F, 0.789F })))); + } + + public void testExecute_EmbeddingsRequest_ForCohere() throws CharacterCodingException { + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.COHERE, + "accesskey", + "secretkey" + ); + var requestEntity = new AmazonBedrockTitanEmbeddingsRequestEntity("abc"); + var truncator = createTruncator(); + var truncatedInput = truncator.truncate(List.of("abc")); + var request = new AmazonBedrockEmbeddingsRequest(truncator, truncatedInput, model, requestEntity, null); + var responseHandler = new AmazonBedrockEmbeddingsResponseHandler(); + + var clientCache = new AmazonBedrockMockClientCache(null, getTestInvokeResult(TEST_COHERE_EMBEDDINGS_RESULT), null); + var listener = new PlainActionFuture(); + + var executor = new AmazonBedrockEmbeddingsExecutor(request, responseHandler, logger, () -> false, listener, clientCache); + executor.run(); + var result = listener.actionGet(new TimeValue(30000)); + assertNotNull(result); + assertThat(result.asMap(), is(buildExpectationFloat(List.of(new float[] { 0.123F, 0.456F, 0.678F, 0.789F })))); + } + + public void testExecute_ChatCompletionRequest() throws CharacterCodingException { + var model = AmazonBedrockChatCompletionModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "accesskey", + "secretkey" + ); + + var requestEntity = new AmazonBedrockTitanCompletionRequestEntity(List.of("abc"), null, null, 512); + var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, null); + var responseHandler = new AmazonBedrockChatCompletionResponseHandler(); + + var clientCache = new AmazonBedrockMockClientCache(getTestConverseResult("converse result"), null, null); + var listener = new PlainActionFuture(); + + var executor = new AmazonBedrockChatCompletionExecutor(request, responseHandler, logger, () -> false, listener, clientCache); + executor.run(); + var result = listener.actionGet(new TimeValue(30000)); + assertNotNull(result); + assertThat(result.asMap(), is(buildExpectationCompletion(List.of("converse result")))); + } + + public void testExecute_FailsProperly_WithElasticsearchException() { + var model = AmazonBedrockChatCompletionModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "accesskey", + "secretkey" + ); + + var requestEntity = new AmazonBedrockTitanCompletionRequestEntity(List.of("abc"), null, null, 512); + var request = new AmazonBedrockChatCompletionRequest(model, requestEntity, null); + var responseHandler = new AmazonBedrockChatCompletionResponseHandler(); + + var clientCache = new AmazonBedrockMockClientCache(null, null, new ElasticsearchException("test exception")); + var listener = new PlainActionFuture(); + + var executor = new AmazonBedrockChatCompletionExecutor(request, responseHandler, logger, () -> false, listener, clientCache); + executor.run(); + + var exceptionThrown = assertThrows(ElasticsearchException.class, () -> listener.actionGet(new TimeValue(30000))); + assertThat(exceptionThrown.getMessage(), containsString("Failed to send request from inference entity id [id]")); + 
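// the executor wraps failures with the inference entity id and keeps the original exception as the cause
+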
assertThat(exceptionThrown.getCause().getMessage(), containsString("test exception")); + } + + public static ConverseResult getTestConverseResult(String resultText) { + var message = new Message().withContent(new ContentBlock().withText(resultText)); + var converseOutput = new ConverseOutput().withMessage(message); + return new ConverseResult().withOutput(converseOutput); + } + + public static InvokeModelResult getTestInvokeResult(String resultJson) throws CharacterCodingException { + var result = new InvokeModelResult(); + result.setContentType("application/json"); + var encoder = Charset.forName("UTF-8").newEncoder(); + result.setBody(encoder.encode(CharBuffer.wrap(resultJson))); + return result; + } + + public static final String TEST_AMAZON_TITAN_EMBEDDINGS_RESULT = """ + { + "embedding": [0.123, 0.456, 0.678, 0.789], + "inputTextTokenCount": int + }"""; + + public static final String TEST_COHERE_EMBEDDINGS_RESULT = """ + { + "embeddings": [ + [0.123, 0.456, 0.678, 0.789] + ], + "id": string, + "response_type" : "embeddings_floats", + "texts": [string] + } + """; +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java new file mode 100644 index 0000000000000..873b2e22497c6 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockInferenceClientCacheTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModelTests; + +import java.io.IOException; +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneId; + +import static org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockInferenceClient.CLIENT_CACHE_EXPIRY_MINUTES; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; + +public class AmazonBedrockInferenceClientCacheTests extends ESTestCase { + public void testCache_ReturnsSameObject() throws IOException { + AmazonBedrockInferenceClientCache cacheInstance; + try (var cache = new AmazonBedrockInferenceClientCache(AmazonBedrockMockInferenceClient::create, null)) { + cacheInstance = cache; + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "inferenceId", + "testregion", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "access_key", + "secret_key" + ); + + var client = cache.getOrCreateClient(model, null); + + var secondModel = AmazonBedrockEmbeddingsModelTests.createModel( + "inferenceId_two", + "testregion", + "a_different_model", + AmazonBedrockProvider.COHERE, + "access_key", + "secret_key" + ); + + var secondClient = cache.getOrCreateClient(secondModel, null); + assertThat(client, sameInstance(secondClient)); + + assertThat(cache.clientCount(), is(1)); + + var thirdClient = cache.getOrCreateClient(model, null); + assertThat(client, sameInstance(thirdClient)); + + assertThat(cache.clientCount(), is(1)); + } + assertThat(cacheInstance.clientCount(), is(0)); + } + + public void testCache_ItEvictsExpiredClients() throws IOException { + var clock = Clock.fixed(Instant.now(), ZoneId.systemDefault()); + AmazonBedrockInferenceClientCache cacheInstance; + try (var cache = new AmazonBedrockInferenceClientCache(AmazonBedrockMockInferenceClient::create, clock)) { + cacheInstance = cache; + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "inferenceId", + "testregion", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "access_key", + "secret_key" + ); + + var client = cache.getOrCreateClient(model, null); + + var secondModel = AmazonBedrockEmbeddingsModelTests.createModel( + "inferenceId_two", + "some_other_region", + "a_different_model", + AmazonBedrockProvider.COHERE, + "other_access_key", + "other_secret_key" + ); + + assertThat(cache.clientCount(), is(1)); + + var secondClient = cache.getOrCreateClient(secondModel, null); + assertThat(client, not(sameInstance(secondClient))); + + assertThat(cache.clientCount(), is(2)); + + // set clock to after expiry + cache.setClock(Clock.fixed(clock.instant().plus(Duration.ofMinutes(CLIENT_CACHE_EXPIRY_MINUTES + 1)), ZoneId.systemDefault())); + + // get another client, this will ensure flushExpiredClients is called + var regetSecondClient = cache.getOrCreateClient(secondModel, null); + assertThat(secondClient, sameInstance(regetSecondClient)); + + var regetFirstClient = cache.getOrCreateClient(model, null); + assertThat(client, not(sameInstance(regetFirstClient))); + } + assertThat(cacheInstance.clientCount(), is(0)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockClientCache.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockClientCache.java new file mode 100644 index 0000000000000..912967a9012d7 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockClientCache.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import com.amazonaws.services.bedrockruntime.model.ConverseResult; +import com.amazonaws.services.bedrockruntime.model.InvokeModelResult; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.io.IOException; + +public class AmazonBedrockMockClientCache implements AmazonBedrockClientCache { + private ConverseResult converseResult = null; + private InvokeModelResult invokeModelResult = null; + private ElasticsearchException exceptionToThrow = null; + + public AmazonBedrockMockClientCache() {} + + public AmazonBedrockMockClientCache( + @Nullable ConverseResult converseResult, + @Nullable InvokeModelResult invokeModelResult, + @Nullable ElasticsearchException exceptionToThrow + ) { + this.converseResult = converseResult; + this.invokeModelResult = invokeModelResult; + this.exceptionToThrow = exceptionToThrow; + } + + @Override + public AmazonBedrockBaseClient getOrCreateClient(AmazonBedrockModel model, TimeValue timeout) { + var client = (AmazonBedrockMockInferenceClient) AmazonBedrockMockInferenceClient.create(model, timeout); + client.setConverseResult(converseResult); + client.setInvokeModelResult(invokeModelResult); + client.setExceptionToThrow(exceptionToThrow); + return client; + } + + @Override + public void close() throws IOException { + // nothing to do + } + + public void setConverseResult(ConverseResult converseResult) { + this.converseResult = converseResult; + } + + public void setInvokeModelResult(InvokeModelResult invokeModelResult) { + this.invokeModelResult = invokeModelResult; + } + + public void setExceptionToThrow(ElasticsearchException exceptionToThrow) { + this.exceptionToThrow = exceptionToThrow; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockExecuteRequestSender.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockExecuteRequestSender.java new file mode 100644 index 0000000000000..b0df8a40e2551 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockExecuteRequestSender.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.inference.external.amazonbedrock;
+
+import com.amazonaws.services.bedrockruntime.model.ConverseResult;
+import com.amazonaws.services.bedrockruntime.model.InvokeModelResult;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockRequest;
+import org.elasticsearch.xpack.inference.external.response.amazonbedrock.AmazonBedrockResponseHandler;
+import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
+
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.function.Supplier;
+
+public class AmazonBedrockMockExecuteRequestSender extends AmazonBedrockExecuteOnlyRequestSender {
+
+    private Queue<Object> results = new ConcurrentLinkedQueue<>();
+    private Queue<List<String>> inputs = new ConcurrentLinkedQueue<>();
+    private int sendCounter = 0;
+
+    public AmazonBedrockMockExecuteRequestSender(AmazonBedrockClientCache clientCache, ThrottlerManager throttlerManager) {
+        super(clientCache, throttlerManager);
+    }
+
+    public void enqueue(Object result) {
+        results.add(result);
+    }
+
+    public int sendCount() {
+        return sendCounter;
+    }
+
+    public List<String> getInputs() {
+        return inputs.remove();
+    }
+
+    @Override
+    protected AmazonBedrockExecutor createExecutor(
+        AmazonBedrockRequest awsRequest,
+        AmazonBedrockResponseHandler awsResponse,
+        Logger logger,
+        Supplier<Boolean> hasRequestTimedOutFunction,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        setCacheResult();
+        return super.createExecutor(awsRequest, awsResponse, logger, hasRequestTimedOutFunction, listener);
+    }
+
+    private void setCacheResult() {
+        var mockCache = (AmazonBedrockMockClientCache) this.clientCache;
+        var result = results.remove();
+        if (result instanceof ConverseResult converseResult) {
+            mockCache.setConverseResult(converseResult);
+            return;
+        }
+
+        if (result instanceof InvokeModelResult invokeModelResult) {
+            mockCache.setInvokeModelResult(invokeModelResult);
+            return;
+        }
+
+        if (result instanceof ElasticsearchException exception) {
+            mockCache.setExceptionToThrow(exception);
+            return;
+        }
+
+        throw new RuntimeException("Unknown result type: " + result.getClass());
+    }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java
new file mode 100644
index 0000000000000..dcbf8dfcbff01
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockInferenceClient.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import com.amazonaws.services.bedrockruntime.AmazonBedrockRuntimeAsync; +import com.amazonaws.services.bedrockruntime.model.ConverseResult; +import com.amazonaws.services.bedrockruntime.model.InvokeModelResult; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockModel; + +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +public class AmazonBedrockMockInferenceClient extends AmazonBedrockInferenceClient { + private ConverseResult converseResult = null; + private InvokeModelResult invokeModelResult = null; + private ElasticsearchException exceptionToThrow = null; + + private Future converseResultFuture = new MockConverseResultFuture(); + private Future invokeModelResultFuture = new MockInvokeResultFuture(); + + public static AmazonBedrockBaseClient create(AmazonBedrockModel model, @Nullable TimeValue timeout) { + return new AmazonBedrockMockInferenceClient(model, timeout); + } + + protected AmazonBedrockMockInferenceClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + super(model, timeout); + } + + public void setExceptionToThrow(ElasticsearchException exceptionToThrow) { + this.exceptionToThrow = exceptionToThrow; + } + + public void setConverseResult(ConverseResult result) { + this.converseResult = result; + } + + public void setInvokeModelResult(InvokeModelResult result) { + this.invokeModelResult = result; + } + + @Override + protected AmazonBedrockRuntimeAsync createAmazonBedrockClient(AmazonBedrockModel model, @Nullable TimeValue timeout) { + var runtimeClient = mock(AmazonBedrockRuntimeAsync.class); + doAnswer(invocation -> invokeModelResultFuture).when(runtimeClient).invokeModelAsync(any()); + doAnswer(invocation -> converseResultFuture).when(runtimeClient).converseAsync(any()); + + return runtimeClient; + } + + @Override + void close() {} + + private class MockConverseResultFuture implements Future { + @Override + public boolean cancel(boolean mayInterruptIfRunning) { + return false; + } + + @Override + public boolean isCancelled() { + return false; + } + + @Override + public boolean isDone() { + return false; + } + + @Override + public ConverseResult get() throws InterruptedException, ExecutionException { + if (exceptionToThrow != null) { + throw exceptionToThrow; + } + return converseResult; + } + + @Override + public ConverseResult get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { + if (exceptionToThrow != null) { + throw exceptionToThrow; + } + return converseResult; + } + } + + private class MockInvokeResultFuture implements Future { + @Override + public boolean cancel(boolean mayInterruptIfRunning) { + return false; + } + + @Override + public boolean isCancelled() { + return false; + } + + @Override + public boolean isDone() { + return false; + } + + @Override + public InvokeModelResult get() throws InterruptedException, ExecutionException { + if (exceptionToThrow != null) { + throw exceptionToThrow; + } + return invokeModelResult; + } + + @Override + public InvokeModelResult get(long timeout, TimeUnit unit) throws 
InterruptedException, ExecutionException, TimeoutException {
+            if (exceptionToThrow != null) {
+                throw exceptionToThrow;
+            }
+            return invokeModelResult;
+        }
+    }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java
new file mode 100644
index 0000000000000..e68beaf4c1eb5
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockMockRequestSender.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.amazonbedrock;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput;
+import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs;
+import org.elasticsearch.xpack.inference.external.http.sender.RequestManager;
+import org.elasticsearch.xpack.inference.external.http.sender.Sender;
+import org.elasticsearch.xpack.inference.services.ServiceComponents;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.ConcurrentLinkedQueue;
+
+public class AmazonBedrockMockRequestSender implements Sender {
+
+    public static class Factory extends AmazonBedrockRequestSender.Factory {
+        private final Sender sender;
+
+        public Factory(ServiceComponents serviceComponents, ClusterService clusterService) {
+            super(serviceComponents, clusterService);
+            this.sender = new AmazonBedrockMockRequestSender();
+        }
+
+        public Sender createSender() {
+            return sender;
+        }
+    }
+
+    private Queue<Object> results = new ConcurrentLinkedQueue<>();
+    private Queue<List<String>> inputs = new ConcurrentLinkedQueue<>();
+    private int sendCounter = 0;
+
+    public void enqueue(Object result) {
+        results.add(result);
+    }
+
+    public int sendCount() {
+        return sendCounter;
+    }
+
+    public List<String> getInputs() {
+        return inputs.remove();
+    }
+
+    @Override
+    public void start() {
+        // do nothing
+    }
+
+    @Override
+    public void send(
+        RequestManager requestCreator,
+        InferenceInputs inferenceInputs,
+        TimeValue timeout,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        sendCounter++;
+        var docsInput = (DocumentsOnlyInput) inferenceInputs;
+        inputs.add(docsInput.getInputs());
+
+        if (results.isEmpty()) {
+            listener.onFailure(new ElasticsearchException("No results found"));
+        } else {
+            var resultObject = results.remove();
+            if (resultObject instanceof InferenceServiceResults inferenceResult) {
+                listener.onResponse(inferenceResult);
+            } else if (resultObject instanceof Exception e) {
+                listener.onFailure(e);
+            } else {
+                throw new RuntimeException("Unknown result type: " + resultObject.getClass());
+            }
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        // do nothing
+    }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSenderTests.java
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSenderTests.java new file mode 100644 index 0000000000000..7fa8a09d5bf12 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/amazonbedrock/AmazonBedrockRequestSenderTests.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.amazonbedrock; + +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockChatCompletionRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.AmazonBedrockEmbeddingsRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.ServiceComponentsTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModelTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModelTests; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockExecutorTests.TEST_AMAZON_TITAN_EMBEDDINGS_RESULT; +import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class AmazonBedrockRequestSenderTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private ThreadPool threadPool; + private final AtomicReference<Thread> threadRef = new AtomicReference<>(); + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + threadRef.set(null); + } + + @After + public void shutdown() throws IOException, InterruptedException { + if (threadRef.get() != null) { + threadRef.get().join(TIMEOUT.millis()); + } + + terminate(threadPool); + } + + public void testCreateSender_SendsEmbeddingsRequestAndReceivesResponse() throws Exception { + var senderFactory = createSenderFactory(threadPool, Settings.EMPTY); + var requestSender = new AmazonBedrockMockExecuteRequestSender(new AmazonBedrockMockClientCache(), mock(ThrottlerManager.class)); +
requestSender.enqueue(AmazonBedrockExecutorTests.getTestInvokeResult(TEST_AMAZON_TITAN_EMBEDDINGS_RESULT)); + try (var sender = createSender(senderFactory, requestSender)) { + sender.start(); + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "test_id", + "test_region", + "test_model", + AmazonBedrockProvider.AMAZONTITAN, + "accesskey", + "secretkey" + ); + + PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>(); + var serviceComponents = ServiceComponentsTests.createWithEmptySettings(threadPool); + var requestManager = new AmazonBedrockEmbeddingsRequestManager( + model, + serviceComponents.truncator(), + threadPool, + new TimeValue(30, TimeUnit.SECONDS) + ); + sender.send(requestManager, new DocumentsOnlyInput(List.of("abc")), null, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat(result.asMap(), is(buildExpectationFloat(List.of(new float[] { 0.123F, 0.456F, 0.678F, 0.789F })))); + } + } + + public void testCreateSender_SendsCompletionRequestAndReceivesResponse() throws Exception { + var senderFactory = createSenderFactory(threadPool, Settings.EMPTY); + var requestSender = new AmazonBedrockMockExecuteRequestSender(new AmazonBedrockMockClientCache(), mock(ThrottlerManager.class)); + requestSender.enqueue(AmazonBedrockExecutorTests.getTestConverseResult("test response text")); + try (var sender = createSender(senderFactory, requestSender)) { + sender.start(); + + var model = AmazonBedrockChatCompletionModelTests.createModel( + "test_id", + "test_region", + "test_model", + AmazonBedrockProvider.AMAZONTITAN, + "accesskey", + "secretkey" + ); + + PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>(); + var requestManager = new AmazonBedrockChatCompletionRequestManager(model, threadPool, new TimeValue(30, TimeUnit.SECONDS)); + sender.send(requestManager, new DocumentsOnlyInput(List.of("abc")), null, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat(result.asMap(), is(buildExpectationCompletion(List.of("test response text")))); + } + } + + public static AmazonBedrockRequestSender.Factory createSenderFactory(ThreadPool threadPool, Settings settings) { + return new AmazonBedrockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, settings), + mockClusterServiceEmpty() + ); + } + + public static Sender createSender(AmazonBedrockRequestSender.Factory factory, AmazonBedrockExecuteOnlyRequestSender requestSender) { + return factory.createSender(requestSender); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..b91aab5410048 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAI21LabsCompletionRequestEntityTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockAI21LabsCompletionRequestEntityTests extends ESTestCase { + public void testRequestEntity_CreatesProperRequest() { + var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTemperature() { + var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), 1.0, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopP() { + var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), null, 1.0, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); +
assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { + var request = new AmazonBedrockAI21LabsCompletionRequestEntity(List.of("test message"), null, null, 128); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..89d5fec7efba6 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockAnthropicCompletionRequestEntityTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockAnthropicCompletionRequestEntityTests extends ESTestCase { + public void testRequestEntity_CreatesProperRequest() { + var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTemperature() { + var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), 1.0, null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopP() { + var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, 1.0, null, null); + var builtRequest = getConverseRequest("testmodel", request); +
assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { + var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, null, null, 128); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopK() { + var request = new AmazonBedrockAnthropicCompletionRequestEntity(List.of("test message"), null, null, 1.0, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopKInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..8df5c7f32e529 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockCohereCompletionRequestEntityTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockCohereCompletionRequestEntityTests extends ESTestCase { + public void testRequestEntity_CreatesProperRequest() { + var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTemperature() { + var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), 1.0, null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopP() { + var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, 1.0, null, null); + var builtRequest = getConverseRequest("testmodel", request); +
assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { + var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, null, null, 128); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopK() { + var request = new AmazonBedrockCohereCompletionRequestEntity(List.of("test message"), null, null, 1.0, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopKInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java new file mode 100644 index 0000000000000..cbbe3c5554967 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockConverseRequestUtils.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import com.amazonaws.services.bedrockruntime.model.ContentBlock; +import com.amazonaws.services.bedrockruntime.model.ConverseRequest; +import com.amazonaws.services.bedrockruntime.model.Message; + +import org.elasticsearch.core.Strings; + +public final class AmazonBedrockConverseRequestUtils { + public static ConverseRequest getConverseRequest(String modelId, AmazonBedrockConverseRequestEntity requestEntity) { + var converseRequest = new ConverseRequest().withModelId(modelId); + converseRequest = requestEntity.addMessages(converseRequest); + converseRequest = requestEntity.addInferenceConfig(converseRequest); + converseRequest = requestEntity.addAdditionalModelFields(converseRequest); + return converseRequest; + } + + public static boolean doesConverseRequestHasMessage(ConverseRequest converseRequest, String expectedMessage) { + for (Message message : converseRequest.getMessages()) { + var content = message.getContent(); + for (ContentBlock contentBlock : content) { + if (contentBlock.getText().equals(expectedMessage)) { + return true; + } + } + } + return false; + } + + public static boolean doesConverseRequestHaveAnyTemperatureInput(ConverseRequest converseRequest) { + return converseRequest.getInferenceConfig() != null + && converseRequest.getInferenceConfig().getTemperature() != null + && (converseRequest.getInferenceConfig().getTemperature().isNaN() == false); + } + + public static boolean doesConverseRequestHaveAnyTopPInput(ConverseRequest converseRequest) { + return converseRequest.getInferenceConfig() != null + && converseRequest.getInferenceConfig().getTopP() != null + && (converseRequest.getInferenceConfig().getTopP().isNaN() == false); + } + + public static boolean doesConverseRequestHaveAnyMaxTokensInput(ConverseRequest converseRequest) { + return converseRequest.getInferenceConfig() != null && converseRequest.getInferenceConfig().getMaxTokens() != null; + } + + public static boolean doesConverseRequestHaveTemperatureInput(ConverseRequest converseRequest, Double temperature) { + return doesConverseRequestHaveAnyTemperatureInput(converseRequest) + && converseRequest.getInferenceConfig().getTemperature().equals(temperature.floatValue()); + } + + public static boolean doesConverseRequestHaveTopPInput(ConverseRequest converseRequest, Double topP) { + return doesConverseRequestHaveAnyTopPInput(converseRequest) + && converseRequest.getInferenceConfig().getTopP().equals(topP.floatValue()); + } + + public static boolean doesConverseRequestHaveMaxTokensInput(ConverseRequest converseRequest, Integer maxTokens) { + return doesConverseRequestHaveAnyMaxTokensInput(converseRequest) + && converseRequest.getInferenceConfig().getMaxTokens().equals(maxTokens); + } + + public static boolean doesConverseRequestHaveAnyTopKInput(ConverseRequest converseRequest) { + if (converseRequest.getAdditionalModelResponseFieldPaths() == null) { + return false; + } + + for (String fieldPath : converseRequest.getAdditionalModelResponseFieldPaths()) { + if (fieldPath.contains("{\"top_k\":")) { + return true; + } + } + return false; + } + + public static boolean doesConverseRequestHaveTopKInput(ConverseRequest converseRequest, Double topK) { + if (doesConverseRequestHaveAnyTopKInput(converseRequest) == false) { + return false; + } + + var checkString = Strings.format("{\"top_k\":%f}", topK.floatValue()); + for (String fieldPath : converseRequest.getAdditionalModelResponseFieldPaths()) { + if 
(fieldPath.contains(checkString)) { + return true; + } + } + return false; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..fa482669a0bb2 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMetaCompletionRequestEntityTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockMetaCompletionRequestEntityTests extends ESTestCase { + public void testRequestEntity_CreatesProperRequest() { + var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTemperature() { + var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), 1.0,
null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopP() { + var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), null, 1.0, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { + var request = new AmazonBedrockMetaCompletionRequestEntity(List.of("test message"), null, null, 128); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..788625d3702b8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockMistralCompletionRequestEntityTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockMistralCompletionRequestEntityTests extends ESTestCase { + public void testRequestEntity_CreatesProperRequest() { + var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTemperature() { + var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), 1.0, null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopP() { + var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, 1.0, null, null); + var builtRequest = getConverseRequest("testmodel", request); +
assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { + var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, null, null, 128); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopK() { + var request = new AmazonBedrockMistralCompletionRequestEntity(List.of("test message"), null, null, 1.0, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopKInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..79fa387876c8b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/completion/AmazonBedrockTitanCompletionRequestEntityTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHasMessage; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopKInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveAnyTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveMaxTokensInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTemperatureInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.doesConverseRequestHaveTopPInput; +import static org.elasticsearch.xpack.inference.external.request.amazonbedrock.completion.AmazonBedrockConverseRequestUtils.getConverseRequest; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockTitanCompletionRequestEntityTests extends ESTestCase { + public void testRequestEntity_CreatesProperRequest() { + var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), null, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTemperature() { + var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), 1.0, null, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertTrue(doesConverseRequestHaveTemperatureInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithTopP() { + var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), null, 1.0, null); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); +
assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertTrue(doesConverseRequestHaveTopPInput(builtRequest, 1.0)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyMaxTokensInput(builtRequest)); + } + + public void testRequestEntity_CreatesProperRequest_WithMaxTokens() { + var request = new AmazonBedrockTitanCompletionRequestEntity(List.of("test message"), null, null, 128); + var builtRequest = getConverseRequest("testmodel", request); + assertThat(builtRequest.getModelId(), is("testmodel")); + assertThat(doesConverseRequestHasMessage(builtRequest, "test message"), is(true)); + assertFalse(doesConverseRequestHaveAnyTemperatureInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopPInput(builtRequest)); + assertFalse(doesConverseRequestHaveAnyTopKInput(builtRequest)); + assertTrue(doesConverseRequestHaveMaxTokensInput(builtRequest, 128)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntityTests.java new file mode 100644 index 0000000000000..fd8114f889d6a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockCohereEmbeddingsRequestEntityTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockJsonBuilder; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockCohereEmbeddingsRequestEntityTests extends ESTestCase { + public void testRequestEntity_GeneratesExpectedJsonBody() throws IOException { + var entity = new AmazonBedrockCohereEmbeddingsRequestEntity(List.of("test input")); + var builder = new AmazonBedrockJsonBuilder(entity); + var result = builder.getStringContent(); + assertThat(result, is("{\"texts\":[\"test input\"],\"input_type\":\"search_document\"}")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntityTests.java new file mode 100644 index 0000000000000..da98fa251fdc8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/amazonbedrock/embeddings/AmazonBedrockTitanEmbeddingsRequestEntityTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.amazonbedrock.embeddings; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.request.amazonbedrock.AmazonBedrockJsonBuilder; + +import java.io.IOException; + +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockTitanEmbeddingsRequestEntityTests extends ESTestCase { + public void testRequestEntity_GeneratesExpectedJsonBody() throws IOException { + var entity = new AmazonBedrockTitanEmbeddingsRequestEntity("test input"); + var builder = new AmazonBedrockJsonBuilder(entity); + var result = builder.getStringContent(); + assertThat(result, is("{\"inputText\":\"test input\"}")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java new file mode 100644 index 0000000000000..904851842a6c8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettingsTests.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; +import org.hamcrest.CoreMatchers; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.ACCESS_KEY_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.SECRET_KEY_FIELD; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockSecretSettingsTests extends AbstractBWCWireSerializationTestCase<AmazonBedrockSecretSettings> { + + public void testIt_CreatesSettings_ReturnsNullFromMap_null() { + var secrets = AmazonBedrockSecretSettings.fromMap(null); + assertNull(secrets); + } + + public void testIt_CreatesSettings_FromMap_WithValues() { + var secrets = AmazonBedrockSecretSettings.fromMap( + new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest", SECRET_KEY_FIELD, "secrettest")) + ); + assertThat( + secrets, + is(new AmazonBedrockSecretSettings(new SecureString("accesstest".toCharArray()), new SecureString("secrettest".toCharArray()))) + ); + } + + public void testIt_CreatesSettings_FromMap_IgnoresExtraKeys() { + var secrets = AmazonBedrockSecretSettings.fromMap( + new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest", SECRET_KEY_FIELD, "secrettest", "extrakey", "extravalue")) + ); + assertThat( + secrets, + is(new AmazonBedrockSecretSettings(new SecureString("accesstest".toCharArray()), new SecureString("secrettest".toCharArray()))) + ); + } + + public void
testIt_FromMap_ThrowsValidationException_AccessKeyMissing() { + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockSecretSettings.fromMap(new HashMap<>(Map.of(SECRET_KEY_FIELD, "secrettest"))) + ); + + assertThat( + thrownException.getMessage(), + containsString(Strings.format("[secret_settings] does not contain the required setting [%s]", ACCESS_KEY_FIELD)) + ); + } + + public void testIt_FromMap_ThrowsValidationException_SecretKeyMissing() { + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockSecretSettings.fromMap(new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest"))) + ); + + assertThat( + thrownException.getMessage(), + containsString(Strings.format("[secret_settings] does not contain the required setting [%s]", SECRET_KEY_FIELD)) + ); + } + + public void testToXContent_CreatesProperContent() throws IOException { + var secrets = AmazonBedrockSecretSettings.fromMap( + new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest", SECRET_KEY_FIELD, "secrettest")) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + secrets.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + assertThat(xContentResult, CoreMatchers.is(""" + {"access_key":"accesstest","secret_key":"secrettest"}""")); + } + + public static Map<String, Object> getAmazonBedrockSecretSettingsMap(String accessKey, String secretKey) { + return new HashMap<String, Object>(Map.of(ACCESS_KEY_FIELD, accessKey, SECRET_KEY_FIELD, secretKey)); + } + + @Override + protected AmazonBedrockSecretSettings mutateInstanceForVersion(AmazonBedrockSecretSettings instance, TransportVersion version) { + return instance; + } + + @Override + protected Writeable.Reader<AmazonBedrockSecretSettings> instanceReader() { + return AmazonBedrockSecretSettings::new; + } + + @Override + protected AmazonBedrockSecretSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AmazonBedrockSecretSettings mutateInstance(AmazonBedrockSecretSettings instance) throws IOException { + return randomValueOtherThan(instance, AmazonBedrockSecretSettingsTests::createRandom); + } + + private static AmazonBedrockSecretSettings createRandom() { + return new AmazonBedrockSecretSettings(new SecureString(randomAlphaOfLength(10).toCharArray()), new SecureString(randomAlphaOfLength(10).toCharArray())); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java new file mode 100644 index 0000000000000..00a840c8d4812 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -0,0 +1,1131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.inference.Utils; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockMockRequestSender; +import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponentsTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionModelTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionServiceSettings; +import org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionTaskSettings; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsModelTests; +import org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsServiceSettings; +import org.hamcrest.CoreMatchers; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static 
org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockSecretSettingsTests.getAmazonBedrockSecretSettingsMap; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionServiceSettingsTests.createChatCompletionRequestSettingsMap; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsServiceSettingsTests.createEmbeddingsRequestSettingsMap; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class AmazonBedrockServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testParseRequestConfig_CreatesAnAmazonBedrockModel() throws IOException { + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null), + Map.of(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + Set.of(), + modelVerificationListener + ); + } + } + + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat(exception.getMessage(), is("The [amazonbedrock] service does not support task type [sparse_embedding]")); + } + ); + + service.parseRequestConfig( + "id", + TaskType.SPARSE_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null), + Map.of(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + Set.of(), + modelVerificationListener + ); + } + } + + public void testCreateModel_ForEmbeddingsTask_InvalidProvider() throws IOException { + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = 
ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat(exception.getMessage(), is("The [text_embedding] task type for provider [anthropic] is not available")); + } + ); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "anthropic", null, null, null, null), + Map.of(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + Set.of(), + modelVerificationListener + ); + } + } + + public void testCreateModel_TopKParameter_NotAvailable() throws IOException { + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat(exception.getMessage(), is("The [top_k] task parameter is not available for provider [amazontitan]")); + } + ); + + service.parseRequestConfig( + "id", + TaskType.COMPLETION, + getRequestConfigMap( + createChatCompletionRequestSettingsMap("region", "model", "amazontitan"), + getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + Set.of(), + modelVerificationListener + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException { + try (var service = createAmazonBedrockService()) { + var config = getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null), + Map.of(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ); + + config.put("extra_key", "value"); + + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat( + exception.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service") + ); + } + ); + + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationListener); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException { + try (var service = createAmazonBedrockService()) { + var serviceSettings = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null); + serviceSettings.put("extra_key", "value"); + + var config = getRequestConfigMap(serviceSettings, Map.of(), getAmazonBedrockSecretSettingsMap("access", "secret")); + + ActionListener modelVerificationListener = ActionListener.wrap((model) -> { + fail("Expected exception, but got model: " + model); + }, e -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat( + e.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service") + ); + }); + + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationListener); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic"); + var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 
0.2, 128); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + taskSettingsMap.put("extra_key", "value"); + + var config = getRequestConfigMap(settingsMap, taskSettingsMap, secretSettingsMap); + + ActionListener modelVerificationListener = ActionListener.wrap((model) -> { + fail("Expected exception, but got model: " + model); + }, e -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat( + e.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service") + ); + }); + + service.parseRequestConfig("id", TaskType.COMPLETION, config, Set.of(), modelVerificationListener); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic"); + var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + secretSettingsMap.put("extra_key", "value"); + + var config = getRequestConfigMap(settingsMap, taskSettingsMap, secretSettingsMap); + + ActionListener modelVerificationListener = ActionListener.wrap((model) -> { + fail("Expected exception, but got model: " + model); + }, e -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat( + e.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [amazonbedrock] service") + ); + }); + + service.parseRequestConfig("id", TaskType.COMPLETION, config, Set.of(), modelVerificationListener); + } + } + + public void testParseRequestConfig_MovesModel() throws IOException { + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null), + Map.of(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + Set.of(), + modelVerificationListener + ); + } + } + + public void testCreateModel_ForEmbeddingsTask_DimensionsIsNotAllowed() throws IOException { + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ValidationException.class)); + assertThat(exception.getMessage(), containsString("[service_settings] does not allow the setting [dimensions]")); + } + ); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", 512, null, null, null), + Map.of(), + 
getAmazonBedrockSecretSettingsMap("access", "secret") + ), + Set.of(), + modelVerificationListener + ); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModel() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "amazontitan"); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, Map.of(), secretSettingsMap); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parsePersistedConfigWithSecrets( + "id", + TaskType.SPARSE_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse stored model [id] for [amazonbedrock] service, please delete and add the service again") + ); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = 
createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + secretSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSecrets() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + persistedConfig.secrets().put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + settingsMap.put("extra_key", "value"); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws 
IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic"); + var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(settingsMap, taskSettingsMap, secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.COMPLETION, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockChatCompletionModel.class)); + + var settings = (AmazonBedrockChatCompletionServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.ANTHROPIC)); + var taskSettings = (AmazonBedrockChatCompletionTaskSettings) model.getTaskSettings(); + assertThat(taskSettings.temperature(), is(1.0)); + assertThat(taskSettings.topP(), is(0.5)); + assertThat(taskSettings.topK(), is(0.2)); + assertThat(taskSettings.maxNewTokens(), is(128)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModel() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertNull(model.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesAnAmazonBedrockChatCompletionModel() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic"); + var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, taskSettingsMap, secretSettingsMap); + var model = service.parsePersistedConfig("id", TaskType.COMPLETION, persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockChatCompletionModel.class)); + + var settings = (AmazonBedrockChatCompletionServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.ANTHROPIC)); + var taskSettings = (AmazonBedrockChatCompletionTaskSettings) model.getTaskSettings(); + assertThat(taskSettings.temperature(), is(1.0)); + assertThat(taskSettings.topP(), is(0.5)); + assertThat(taskSettings.topK(), is(0.2)); + 
assertThat(taskSettings.maxNewTokens(), is(128)); + assertNull(model.getSecretSettings()); + } + } + + public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config()) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse stored model [id] for [amazonbedrock] service, please delete and add the service again") + ); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertNull(model.getSecretSettings()); + } + } + + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + settingsMap.put("extra_key", "value"); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertNull(model.getSecretSettings()); + } + } + + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try (var service = createAmazonBedrockService()) { + var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic"); + var taskSettingsMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.2, 128); + taskSettingsMap.put("extra_key", "value"); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, taskSettingsMap, secretSettingsMap); + var model = service.parsePersistedConfig("id", TaskType.COMPLETION, 
persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockChatCompletionModel.class)); + + var settings = (AmazonBedrockChatCompletionServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.model(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.ANTHROPIC)); + var taskSettings = (AmazonBedrockChatCompletionTaskSettings) model.getTaskSettings(); + assertThat(taskSettings.temperature(), is(1.0)); + assertThat(taskSettings.topP(), is(0.5)); + assertThat(taskSettings.topK(), is(0.2)); + assertThat(taskSettings.maxNewTokens(), is(128)); + assertNull(model.getSecretSettings()); + } + } + + public void testInfer_ThrowsErrorWhenModelIsNotAmazonBedrockModel() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + var mockModel = getInvalidModel("model_id", "service_name"); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + mockModel, + null, + List.of(""), + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") + ); + + verify(factory, times(1)).createSender(); + verify(sender, times(1)).start(); + } + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + public void testInfer_SendsRequest_ForEmbeddingsModel() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) { + var results = new InferenceTextEmbeddingFloatResults( + List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.123F, 0.678F })) + ); + requestSender.enqueue(results); + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "access", + "secret" + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + model, + null, + List.of("abc"), + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), Matchers.is(buildExpectationFloat(List.of(new float[] { 0.123F, 0.678F })))); + } + } + } + + public void testInfer_SendsRequest_ForChatCompletionModel() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + 
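+ // The infer(...) tests in this class share one pattern: the HttpRequestSender.Factory is a plain Mockito mock, + // while AmazonBedrockMockRequestSender.Factory hands out a mock Bedrock sender whose responses are enqueued up + // front, so no request ever leaves the test. A minimal sketch of the pattern, using only the enqueue/getInputs + // API exercised elsewhere in this file: + // + // var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender(); + // requestSender.enqueue(new ChatCompletionResults(List.of(new ChatCompletionResults.Result("test result")))); + // // service.infer(...) then completes its listener with the enqueued result, and requestSender.getInputs() + // // records the inputs that would otherwise have been sent to AWS.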
+ var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) { + var mockResults = new ChatCompletionResults(List.of(new ChatCompletionResults.Result("test result"))); + requestSender.enqueue(mockResults); + + var model = AmazonBedrockChatCompletionModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "access", + "secret" + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + model, + null, + List.of("abc"), + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), Matchers.is(buildExpectationCompletion(List.of("test result")))); + } + } + } + + public void testCheckModelConfig_IncludesMaxTokens_ForEmbeddingsModel() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) { + var results = new InferenceTextEmbeddingFloatResults( + List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.123F, 0.678F })) + ); + requestSender.enqueue(results); + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + false, + 100, + null, + null, + "access", + "secret" + ); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + assertThat( + result, + is( + AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 2, + false, + 100, + SimilarityMeasure.COSINE, + null, + "access", + "secret" + ) + ) + ); + var inputStrings = requestSender.getInputs(); + + MatcherAssert.assertThat(inputStrings, Matchers.is(List.of("how big"))); + } + } + } + + public void testCheckModelConfig_HasSimilarity_ForEmbeddingsModel() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) { + var results = new InferenceTextEmbeddingFloatResults( + List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.123F, 0.678F })) + ); + requestSender.enqueue(results); + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + 
"model", + AmazonBedrockProvider.AMAZONTITAN, + null, + false, + null, + SimilarityMeasure.COSINE, + null, + "access", + "secret" + ); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + assertThat( + result, + is( + AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 2, + false, + null, + SimilarityMeasure.COSINE, + null, + "access", + "secret" + ) + ) + ); + var inputStrings = requestSender.getInputs(); + + MatcherAssert.assertThat(inputStrings, Matchers.is(List.of("how big"))); + } + } + } + + public void testCheckModelConfig_ThrowsIfEmbeddingSizeDoesNotMatchValueSetByUser() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) { + var results = new InferenceTextEmbeddingFloatResults( + List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.123F, 0.678F })) + ); + requestSender.enqueue(results); + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 3, + true, + null, + null, + null, + "access", + "secret" + ); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + exception.getMessage(), + is( + "The retrieved embeddings size [2] does not match the size specified in the settings [3]. 
" + + "Please recreate the [id] configuration with the correct dimensions" + ) + ); + + var inputStrings = requestSender.getInputs(); + MatcherAssert.assertThat(inputStrings, Matchers.is(List.of("how big"))); + } + } + } + + public void testCheckModelConfig_ReturnsNewModelReference_AndDoesNotSendDimensionsField_WhenNotSetByUser() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) { + var results = new InferenceTextEmbeddingFloatResults( + List.of(new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.123F, 0.678F })) + ); + requestSender.enqueue(results); + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 100, + false, + null, + SimilarityMeasure.COSINE, + null, + "access", + "secret" + ); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + assertThat( + result, + is( + AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 2, + false, + null, + SimilarityMeasure.COSINE, + null, + "access", + "secret" + ) + ) + ); + var inputStrings = requestSender.getInputs(); + + MatcherAssert.assertThat(inputStrings, Matchers.is(List.of("how big"))); + } + } + } + + public void testInfer_UnauthorizedResponse() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) { + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "us-east-1", + "amazon.titan-embed-text-v1", + AmazonBedrockProvider.AMAZONTITAN, + "_INVALID_AWS_ACCESS_KEY_", + "_INVALID_AWS_SECRET_KEY_" + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + model, + null, + List.of("abc"), + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var exceptionThrown = assertThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + assertThat(exceptionThrown.getCause().getMessage(), containsString("The security token included in the request is invalid")); + } + } + + public void testChunkedInfer_CallsInfer_ConvertsFloatResponse_ForEmbeddings() throws IOException { + var sender = mock(Sender.class); + var factory = mock(HttpRequestSender.Factory.class); + when(factory.createSender()).thenReturn(sender); + + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + + try (var service = new AmazonBedrockService(factory, amazonBedrockFactory, createWithEmptySettings(threadPool))) 
{ + try (var requestSender = (AmazonBedrockMockRequestSender) amazonBedrockFactory.createSender()) { + var mockResults = new InferenceTextEmbeddingFloatResults( + List.of( + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.123F, 0.678F }), + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(new float[] { 0.456F, 0.987F }) + ) + ); + requestSender.enqueue(mockResults); + + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "access", + "secret" + ); + PlainActionFuture> listener = new PlainActionFuture<>(); + service.chunkedInfer( + model, + List.of("abc", "xyz"), + new HashMap<>(), + InputType.INGEST, + new ChunkingOptions(null, null), + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + var results = listener.actionGet(TIMEOUT); + assertThat(results, hasSize(2)); + { + assertThat(results.get(0), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(0); + assertThat(floatResult.chunks(), hasSize(1)); + assertEquals("abc", floatResult.chunks().get(0).matchedText()); + assertArrayEquals(new float[] { 0.123F, 0.678F }, floatResult.chunks().get(0).embedding(), 0.0f); + } + { + assertThat(results.get(1), CoreMatchers.instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var floatResult = (InferenceChunkedTextEmbeddingFloatResults) results.get(1); + assertThat(floatResult.chunks(), hasSize(1)); + assertEquals("xyz", floatResult.chunks().get(0).matchedText()); + assertArrayEquals(new float[] { 0.456F, 0.987F }, floatResult.chunks().get(0).embedding(), 0.0f); + } + } + } + } + + private AmazonBedrockService createAmazonBedrockService() { + var amazonBedrockFactory = new AmazonBedrockMockRequestSender.Factory( + ServiceComponentsTests.createWithSettings(threadPool, Settings.EMPTY), + mockClusterServiceEmpty() + ); + return new AmazonBedrockService(mock(HttpRequestSender.Factory.class), amazonBedrockFactory, createWithEmptySettings(threadPool)); + } + + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map secretSettings + ) { + var builtServiceSettings = new HashMap<>(); + builtServiceSettings.putAll(serviceSettings); + builtServiceSettings.putAll(secretSettings); + + return new HashMap<>( + Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings) + ); + } + + private Utils.PersistedConfig getPersistedConfigMap( + Map serviceSettings, + Map taskSettings, + Map secretSettings + ) { + + return new Utils.PersistedConfig( + new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), + new HashMap<>(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModelTests.java new file mode 100644 index 0000000000000..22173943ff432 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionModelTests.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockSecretSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import static org.elasticsearch.xpack.inference.services.amazonbedrock.completion.AmazonBedrockChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class AmazonBedrockChatCompletionModelTests extends ESTestCase { + public void testOverrideWith_OverridesWithoutValues() { + var model = createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 1.0, + 0.5, + 0.6, + 512, + null, + "access_key", + "secret_key" + ); + var requestTaskSettingsMap = getChatCompletionTaskSettingsMap(null, null, null, null); + var overriddenModel = AmazonBedrockChatCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_temperature() { + var model = createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 1.0, + null, + null, + null, + null, + "access_key", + "secret_key" + ); + var requestTaskSettings = getChatCompletionTaskSettingsMap(0.5, null, null, null); + var overriddenModel = AmazonBedrockChatCompletionModel.of(model, requestTaskSettings); + assertThat( + overriddenModel, + is( + createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + 0.5, + null, + null, + null, + null, + "access_key", + "secret_key" + ) + ) + ); + } + + public void testOverrideWith_topP() { + var model = createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + 0.8, + null, + null, + null, + "access_key", + "secret_key" + ); + var requestTaskSettings = getChatCompletionTaskSettingsMap(null, 0.5, null, null); + var overriddenModel = AmazonBedrockChatCompletionModel.of(model, requestTaskSettings); + assertThat( + overriddenModel, + is( + createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + 0.5, + null, + null, + null, + "access_key", + "secret_key" + ) + ) + ); + } + + public void testOverrideWith_topK() { + var model = createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + null, + 1.0, + null, + null, + "access_key", + "secret_key" + ); + var requestTaskSettings = getChatCompletionTaskSettingsMap(null, null, 0.8, null); + var overriddenModel = AmazonBedrockChatCompletionModel.of(model, requestTaskSettings); + assertThat( + overriddenModel, + is( + createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + null, + 0.8, + null, + null, + "access_key", + "secret_key" + ) + ) + ); + } + + public void testOverrideWith_maxNewTokens() { + var model = createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + null, + null, + 512, + null, + "access_key", + "secret_key" + ); + var 
requestTaskSettings = getChatCompletionTaskSettingsMap(null, null, null, 128); + var overriddenModel = AmazonBedrockChatCompletionModel.of(model, requestTaskSettings); + assertThat( + overriddenModel, + is( + createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + null, + null, + 128, + null, + "access_key", + "secret_key" + ) + ) + ); + } + + public static AmazonBedrockChatCompletionModel createModel( + String id, + String region, + String model, + AmazonBedrockProvider provider, + String accessKey, + String secretKey + ) { + return createModel(id, region, model, provider, null, null, null, null, null, accessKey, secretKey); + } + + public static AmazonBedrockChatCompletionModel createModel( + String id, + String region, + String model, + AmazonBedrockProvider provider, + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Double topK, + @Nullable Integer maxNewTokens, + @Nullable RateLimitSettings rateLimitSettings, + String accessKey, + String secretKey + ) { + return new AmazonBedrockChatCompletionModel( + id, + TaskType.COMPLETION, + "amazonbedrock", + new AmazonBedrockChatCompletionServiceSettings(region, model, provider, rateLimitSettings), + new AmazonBedrockChatCompletionTaskSettings(temperature, topP, topK, maxNewTokens), + new AmazonBedrockSecretSettings(new SecureString(accessKey), new SecureString(secretKey)) + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettingsTests.java new file mode 100644 index 0000000000000..681088c786b6b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionRequestTaskSettingsTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.MatcherAssert; + +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MAX_NEW_TOKENS_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TEMPERATURE_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_K_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_P_FIELD; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockChatCompletionRequestTaskSettingsTests extends ESTestCase { + public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { + var settings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertThat(settings, is(AmazonBedrockChatCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { + var settings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + assertThat(settings, is(AmazonBedrockChatCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsTemperature() { + var settings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(TEMPERATURE_FIELD, 0.1))); + assertThat(settings.temperature(), is(0.1)); + } + + public void testFromMap_ReturnsTopP() { + var settings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(TOP_P_FIELD, 0.1))); + assertThat(settings.topP(), is(0.1)); + } + + public void testFromMap_ReturnsTopK() { + var settings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(TOP_K_FIELD, 0.3))); + assertThat(settings.topK(), is(0.3)); + } + + public void testFromMap_ReturnsMaxNewTokens() { + var settings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(MAX_NEW_TOKENS_FIELD, 512))); + assertThat(settings.maxNewTokens(), is(512)); + } + + public void testFromMap_TemperatureIsInvalidValue_ThrowsValidationException() { + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(TEMPERATURE_FIELD, "invalid"))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format("field [temperature] is not of the expected type. The value [invalid] cannot be converted to a [Double]") + ) + ); + } + + public void testFromMap_TopPIsInvalidValue_ThrowsValidationException() { + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(TOP_P_FIELD, "invalid"))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format("field [top_p] is not of the expected type. 
The value [invalid] cannot be converted to a [Double]") + ) + ); + } + + public void testFromMap_TopKIsInvalidValue_ThrowsValidationException() { + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(TOP_K_FIELD, "invalid"))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString("field [top_k] is not of the expected type. The value [invalid] cannot be converted to a [Double]") + ); + } + + public void testFromMap_MaxNewTokensIsInvalidValue_ThrowsValidationException() { + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(MAX_NEW_TOKENS_FIELD, "invalid"))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString("field [max_new_tokens] is not of the expected type. The value [invalid] cannot be converted to a [Integer]") + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettingsTests.java new file mode 100644 index 0000000000000..90868530d8df8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionServiceSettingsTests.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; +import org.hamcrest.CoreMatchers; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MODEL_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.PROVIDER_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.REGION_FIELD; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockChatCompletionServiceSettingsTests extends AbstractBWCWireSerializationTestCase< + AmazonBedrockChatCompletionServiceSettings> { + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var serviceSettings = AmazonBedrockChatCompletionServiceSettings.fromMap( + createChatCompletionRequestSettingsMap(region, model, provider), + ConfigurationParseContext.REQUEST + ); + + assertThat( + serviceSettings, + is(new AmazonBedrockChatCompletionServiceSettings(region, model, AmazonBedrockProvider.AMAZONTITAN, null)) + ); + } + + public void testFromMap_RequestWithRateLimit_CreatesSettingsCorrectly() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var settingsMap = createChatCompletionRequestSettingsMap(region, model, provider); + settingsMap.put(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 3))); + + var serviceSettings = AmazonBedrockChatCompletionServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST); + + assertThat( + serviceSettings, + is(new AmazonBedrockChatCompletionServiceSettings(region, model, AmazonBedrockProvider.AMAZONTITAN, new RateLimitSettings(3))) + ); + } + + public void testFromMap_Persistent_CreatesSettingsCorrectly() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var settingsMap = createChatCompletionRequestSettingsMap(region, model, provider); + var serviceSettings = AmazonBedrockChatCompletionServiceSettings.fromMap(settingsMap, ConfigurationParseContext.PERSISTENT); + + assertThat( + serviceSettings, + is(new AmazonBedrockChatCompletionServiceSettings(region, model, AmazonBedrockProvider.AMAZONTITAN, null)) + ); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new AmazonBedrockChatCompletionServiceSettings( + "testregion", + "testmodel", + AmazonBedrockProvider.AMAZONTITAN, + new RateLimitSettings(3) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, CoreMatchers.is(""" + 
{"region":"testregion","model":"testmodel","provider":"AMAZONTITAN",""" + """ + "rate_limit":{"requests_per_minute":3}}""")); + } + + public static HashMap createChatCompletionRequestSettingsMap(String region, String model, String provider) { + return new HashMap(Map.of(REGION_FIELD, region, MODEL_FIELD, model, PROVIDER_FIELD, provider)); + } + + @Override + protected AmazonBedrockChatCompletionServiceSettings mutateInstanceForVersion( + AmazonBedrockChatCompletionServiceSettings instance, + TransportVersion version + ) { + return instance; + } + + @Override + protected Writeable.Reader instanceReader() { + return AmazonBedrockChatCompletionServiceSettings::new; + } + + @Override + protected AmazonBedrockChatCompletionServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AmazonBedrockChatCompletionServiceSettings mutateInstance(AmazonBedrockChatCompletionServiceSettings instance) + throws IOException { + return randomValueOtherThan(instance, AmazonBedrockChatCompletionServiceSettingsTests::createRandom); + } + + private static AmazonBedrockChatCompletionServiceSettings createRandom() { + return new AmazonBedrockChatCompletionServiceSettings( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomFrom(AmazonBedrockProvider.values()), + RateLimitSettingsTests.createRandom() + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java new file mode 100644 index 0000000000000..0d5440c6d2cf8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/completion/AmazonBedrockChatCompletionTaskSettingsTests.java @@ -0,0 +1,226 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MAX_NEW_TOKENS_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TEMPERATURE_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_K_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.TOP_P_FIELD; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockChatCompletionTaskSettingsTests extends AbstractBWCWireSerializationTestCase< + AmazonBedrockChatCompletionTaskSettings> { + + public void testFromMap_AllValues() { + var taskMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512); + assertEquals( + new AmazonBedrockChatCompletionTaskSettings(1.0, 0.5, 0.6, 512), + AmazonBedrockChatCompletionTaskSettings.fromMap(taskMap) + ); + } + + public void testFromMap_TemperatureIsInvalidValue_ThrowsValidationException() { + var taskMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512); + taskMap.put(TEMPERATURE_FIELD, "invalid"); + + var thrownException = expectThrows(ValidationException.class, () -> AmazonBedrockChatCompletionTaskSettings.fromMap(taskMap)); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format("field [temperature] is not of the expected type. The value [invalid] cannot be converted to a [Double]") + ) + ); + } + + public void testFromMap_TopPIsInvalidValue_ThrowsValidationException() { + var taskMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512); + taskMap.put(TOP_P_FIELD, "invalid"); + + var thrownException = expectThrows(ValidationException.class, () -> AmazonBedrockChatCompletionTaskSettings.fromMap(taskMap)); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format("field [top_p] is not of the expected type. The value [invalid] cannot be converted to a [Double]") + ) + ); + } + + public void testFromMap_TopKIsInvalidValue_ThrowsValidationException() { + var taskMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512); + taskMap.put(TOP_K_FIELD, "invalid"); + + var thrownException = expectThrows(ValidationException.class, () -> AmazonBedrockChatCompletionTaskSettings.fromMap(taskMap)); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString("field [top_k] is not of the expected type. 
The value [invalid] cannot be converted to a [Double]") + ); + } + + public void testFromMap_MaxNewTokensIsInvalidValue_ThrowsValidationException() { + var taskMap = getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512); + taskMap.put(MAX_NEW_TOKENS_FIELD, "invalid"); + + var thrownException = expectThrows(ValidationException.class, () -> AmazonBedrockChatCompletionTaskSettings.fromMap(taskMap)); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format("field [max_new_tokens] is not of the expected type. The value [invalid] cannot be converted to a [Integer]") + ) + ); + } + + public void testFromMap_WithNoValues_DoesNotThrowException() { + var taskMap = AmazonBedrockChatCompletionTaskSettings.fromMap(new HashMap(Map.of())); + assertNull(taskMap.temperature()); + assertNull(taskMap.topP()); + assertNull(taskMap.topK()); + assertNull(taskMap.maxNewTokens()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var settings = AmazonBedrockChatCompletionTaskSettings.fromMap(getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512)); + var overrideSettings = AmazonBedrockChatCompletionTaskSettings.of(settings, AmazonBedrockChatCompletionTaskSettings.EMPTY_SETTINGS); + MatcherAssert.assertThat(overrideSettings, is(settings)); + } + + public void testOverrideWith_UsesTemperatureOverride() { + var settings = AmazonBedrockChatCompletionTaskSettings.fromMap(getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512)); + var overrideSettings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap( + getChatCompletionTaskSettingsMap(0.3, null, null, null) + ); + var overriddenTaskSettings = AmazonBedrockChatCompletionTaskSettings.of(settings, overrideSettings); + MatcherAssert.assertThat(overriddenTaskSettings, is(new AmazonBedrockChatCompletionTaskSettings(0.3, 0.5, 0.6, 512))); + } + + public void testOverrideWith_UsesTopPOverride() { + var settings = AmazonBedrockChatCompletionTaskSettings.fromMap(getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512)); + var overrideSettings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap( + getChatCompletionTaskSettingsMap(null, 0.2, null, null) + ); + var overriddenTaskSettings = AmazonBedrockChatCompletionTaskSettings.of(settings, overrideSettings); + MatcherAssert.assertThat(overriddenTaskSettings, is(new AmazonBedrockChatCompletionTaskSettings(1.0, 0.2, 0.6, 512))); + } + + public void testOverrideWith_UsesDoSampleOverride() { + var settings = AmazonBedrockChatCompletionTaskSettings.fromMap(getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512)); + var overrideSettings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap( + getChatCompletionTaskSettingsMap(null, null, 0.1, null) + ); + var overriddenTaskSettings = AmazonBedrockChatCompletionTaskSettings.of(settings, overrideSettings); + MatcherAssert.assertThat(overriddenTaskSettings, is(new AmazonBedrockChatCompletionTaskSettings(1.0, 0.5, 0.1, 512))); + } + + public void testOverrideWith_UsesMaxNewTokensOverride() { + var settings = AmazonBedrockChatCompletionTaskSettings.fromMap(getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512)); + var overrideSettings = AmazonBedrockChatCompletionRequestTaskSettings.fromMap( + getChatCompletionTaskSettingsMap(null, null, null, 128) + ); + var overriddenTaskSettings = AmazonBedrockChatCompletionTaskSettings.of(settings, overrideSettings); + MatcherAssert.assertThat(overriddenTaskSettings, is(new AmazonBedrockChatCompletionTaskSettings(1.0, 0.5, 0.6, 128))); + } + + public void 
testToXContent_WithoutParameters() throws IOException { + var settings = AmazonBedrockChatCompletionTaskSettings.fromMap(getChatCompletionTaskSettingsMap(null, null, null, null)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + settings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is("{}")); + } + + public void testToXContent_WithParameters() throws IOException { + var settings = AmazonBedrockChatCompletionTaskSettings.fromMap(getChatCompletionTaskSettingsMap(1.0, 0.5, 0.6, 512)); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + settings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"temperature":1.0,"top_p":0.5,"top_k":0.6,"max_new_tokens":512}""")); + } + + public static Map getChatCompletionTaskSettingsMap( + @Nullable Double temperature, + @Nullable Double topP, + @Nullable Double topK, + @Nullable Integer maxNewTokens + ) { + var map = new HashMap(); + + if (temperature != null) { + map.put(TEMPERATURE_FIELD, temperature); + } + + if (topP != null) { + map.put(TOP_P_FIELD, topP); + } + + if (topK != null) { + map.put(TOP_K_FIELD, topK); + } + + if (maxNewTokens != null) { + map.put(MAX_NEW_TOKENS_FIELD, maxNewTokens); + } + + return map; + } + + @Override + protected AmazonBedrockChatCompletionTaskSettings mutateInstanceForVersion( + AmazonBedrockChatCompletionTaskSettings instance, + TransportVersion version + ) { + return instance; + } + + @Override + protected Writeable.Reader instanceReader() { + return AmazonBedrockChatCompletionTaskSettings::new; + } + + @Override + protected AmazonBedrockChatCompletionTaskSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AmazonBedrockChatCompletionTaskSettings mutateInstance(AmazonBedrockChatCompletionTaskSettings instance) throws IOException { + return randomValueOtherThan(instance, AmazonBedrockChatCompletionTaskSettingsTests::createRandom); + } + + private static AmazonBedrockChatCompletionTaskSettings createRandom() { + return new AmazonBedrockChatCompletionTaskSettings( + randomFrom(new Double[] { null, randomDouble() }), + randomFrom(new Double[] { null, randomDouble() }), + randomFrom(new Double[] { null, randomDouble() }), + randomFrom(new Integer[] { null, randomNonNegativeInt() }) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java new file mode 100644 index 0000000000000..711e3cbb5a511 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.EmptyTaskSettings; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockSecretSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; + +public class AmazonBedrockEmbeddingsModelTests extends ESTestCase { + + public void testCreateModel_withTaskSettings_shouldFail() { + var baseModel = createModel("id", "region", "model", AmazonBedrockProvider.AMAZONTITAN, "accesskey", "secretkey"); + var thrownException = assertThrows( + ValidationException.class, + () -> AmazonBedrockEmbeddingsModel.of(baseModel, Map.of("testkey", "testvalue")) + ); + assertThat(thrownException.getMessage(), containsString("Amazon Bedrock embeddings model cannot have task settings")); + } + + // model creation only - no tests to define, but we want to have the public createModel + // method available + + public static AmazonBedrockEmbeddingsModel createModel( + String inferenceId, + String region, + String model, + AmazonBedrockProvider provider, + String accessKey, + String secretKey + ) { + return createModel(inferenceId, region, model, provider, null, false, null, null, new RateLimitSettings(240), accessKey, secretKey); + } + + public static AmazonBedrockEmbeddingsModel createModel( + String inferenceId, + String region, + String model, + AmazonBedrockProvider provider, + @Nullable Integer dimensions, + boolean dimensionsSetByUser, + @Nullable Integer maxTokens, + @Nullable SimilarityMeasure similarity, + RateLimitSettings rateLimitSettings, + String accessKey, + String secretKey + ) { + return new AmazonBedrockEmbeddingsModel( + inferenceId, + TaskType.TEXT_EMBEDDING, + "amazonbedrock", + new AmazonBedrockEmbeddingsServiceSettings( + region, + model, + provider, + dimensions, + dimensionsSetByUser, + maxTokens, + similarity, + rateLimitSettings + ), + new EmptyTaskSettings(), + new AmazonBedrockSecretSettings(new SecureString(accessKey), new SecureString(secretKey)) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettingsTests.java new file mode 100644 index 0000000000000..a100b89e1db6e --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsServiceSettingsTests.java @@ -0,0 +1,404 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.SimilarityMeasure; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; +import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockProvider; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettingsTests; +import org.hamcrest.CoreMatchers; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.MODEL_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.PROVIDER_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.AmazonBedrockConstants.REGION_FIELD; +import static org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings.AmazonBedrockEmbeddingsServiceSettings.DIMENSIONS_SET_BY_USER; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AmazonBedrockEmbeddingsServiceSettingsTests extends AbstractBWCWireSerializationTestCase< + AmazonBedrockEmbeddingsServiceSettings> { + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var maxInputTokens = 512; + var serviceSettings = AmazonBedrockEmbeddingsServiceSettings.fromMap( + createEmbeddingsRequestSettingsMap(region, model, provider, null, null, maxInputTokens, SimilarityMeasure.COSINE), + ConfigurationParseContext.REQUEST + ); + + assertThat( + serviceSettings, + is( + new AmazonBedrockEmbeddingsServiceSettings( + region, + model, + AmazonBedrockProvider.AMAZONTITAN, + null, + false, + maxInputTokens, + SimilarityMeasure.COSINE, + null + ) + ) + ); + } + + public void testFromMap_RequestWithRateLimit_CreatesSettingsCorrectly() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var maxInputTokens = 512; + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, null, null, maxInputTokens, SimilarityMeasure.COSINE); + settingsMap.put(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 3))); + + var serviceSettings = AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST); + + assertThat( + serviceSettings, + is( + new AmazonBedrockEmbeddingsServiceSettings( + region, + model, + AmazonBedrockProvider.AMAZONTITAN, + null, + false, + maxInputTokens, + SimilarityMeasure.COSINE, + new 
RateLimitSettings(3) + ) + ) + ); + } + + public void testFromMap_Request_DimensionsSetByUser_IsFalse_WhenDimensionsAreNotPresent() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var maxInputTokens = 512; + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, null, null, maxInputTokens, SimilarityMeasure.COSINE); + var serviceSettings = AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST); + + assertThat( + serviceSettings, + is( + new AmazonBedrockEmbeddingsServiceSettings( + region, + model, + AmazonBedrockProvider.AMAZONTITAN, + null, + false, + maxInputTokens, + SimilarityMeasure.COSINE, + null + ) + ) + ); + } + + public void testFromMap_Request_DimensionsSetByUser_ShouldThrowWhenPresent() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var maxInputTokens = 512; + + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, null, true, maxInputTokens, SimilarityMeasure.COSINE); + + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString( + Strings.format("Validation Failed: 1: [service_settings] does not allow the setting [%s];", DIMENSIONS_SET_BY_USER) + ) + ); + } + + public void testFromMap_Request_Dimensions_ShouldThrowWhenPresent() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var dims = 128; + + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, dims, null, null, null); + + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString(Strings.format("[service_settings] does not allow the setting [%s]", DIMENSIONS)) + ); + } + + public void testFromMap_Request_MaxTokensShouldBePositiveInteger() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var maxInputTokens = -128; + + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, null, null, maxInputTokens, null); + + var thrownException = expectThrows( + ValidationException.class, + () -> AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.REQUEST) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + containsString(Strings.format("[%s] must be a positive integer", MAX_INPUT_TOKENS)) + ); + } + + public void testFromMap_Persistent_CreatesSettingsCorrectly() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + var dims = 1536; + var maxInputTokens = 512; + + var settingsMap = createEmbeddingsRequestSettingsMap( + region, + model, + provider, + dims, + false, + maxInputTokens, + SimilarityMeasure.COSINE + ); + var serviceSettings = AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.PERSISTENT); + + assertThat( + serviceSettings, + is( + new AmazonBedrockEmbeddingsServiceSettings( + region, + model, + AmazonBedrockProvider.AMAZONTITAN, + dims, + false, + maxInputTokens, + SimilarityMeasure.COSINE, + null + ) + ) + ); + } + + public void testFromMap_PersistentContext_DoesNotThrowException_WhenDimensionsIsNull() { + var region = "region"; + var model = 
"model-id"; + var provider = "amazontitan"; + + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, null, true, null, null); + var serviceSettings = AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.PERSISTENT); + + assertThat( + serviceSettings, + is(new AmazonBedrockEmbeddingsServiceSettings(region, model, AmazonBedrockProvider.AMAZONTITAN, null, true, null, null, null)) + ); + } + + public void testFromMap_PersistentContext_DoesNotThrowException_WhenSimilarityIsPresent() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, null, true, null, SimilarityMeasure.DOT_PRODUCT); + var serviceSettings = AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.PERSISTENT); + + assertThat( + serviceSettings, + is( + new AmazonBedrockEmbeddingsServiceSettings( + region, + model, + AmazonBedrockProvider.AMAZONTITAN, + null, + true, + null, + SimilarityMeasure.DOT_PRODUCT, + null + ) + ) + ); + } + + public void testFromMap_PersistentContext_ThrowsException_WhenDimensionsSetByUserIsNull() { + var region = "region"; + var model = "model-id"; + var provider = "amazontitan"; + + var settingsMap = createEmbeddingsRequestSettingsMap(region, model, provider, 1, null, null, null); + + var exception = expectThrows( + ValidationException.class, + () -> AmazonBedrockEmbeddingsServiceSettings.fromMap(settingsMap, ConfigurationParseContext.PERSISTENT) + ); + + assertThat( + exception.getMessage(), + containsString("Validation Failed: 1: [service_settings] does not contain the required setting [dimensions_set_by_user];") + ); + } + + public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { + var entity = new AmazonBedrockEmbeddingsServiceSettings( + "testregion", + "testmodel", + AmazonBedrockProvider.AMAZONTITAN, + null, + true, + null, + null, + new RateLimitSettings(2) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, CoreMatchers.is(""" + {"region":"testregion","model":"testmodel","provider":"AMAZONTITAN",""" + """ + "rate_limit":{"requests_per_minute":2},"dimensions_set_by_user":true}""")); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new AmazonBedrockEmbeddingsServiceSettings( + "testregion", + "testmodel", + AmazonBedrockProvider.AMAZONTITAN, + 1024, + false, + 512, + null, + new RateLimitSettings(3) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, CoreMatchers.is(""" + {"region":"testregion","model":"testmodel","provider":"AMAZONTITAN",""" + """ + "rate_limit":{"requests_per_minute":3},"dimensions":1024,"max_input_tokens":512,"dimensions_set_by_user":false}""")); + } + + public void testToFilteredXContent_WritesAllValues_ExceptDimensionsSetByUser() throws IOException { + var entity = new AmazonBedrockEmbeddingsServiceSettings( + "testregion", + "testmodel", + AmazonBedrockProvider.AMAZONTITAN, + 1024, + false, + 512, + null, + new RateLimitSettings(3) + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = entity.getFilteredXContentObject(); + 
filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, CoreMatchers.is(""" + {"region":"testregion","model":"testmodel","provider":"AMAZONTITAN",""" + """ + "rate_limit":{"requests_per_minute":3},"dimensions":1024,"max_input_tokens":512}""")); + } + + public static HashMap createEmbeddingsRequestSettingsMap( + String region, + String model, + String provider, + @Nullable Integer dimensions, + @Nullable Boolean dimensionsSetByUser, + @Nullable Integer maxTokens, + @Nullable SimilarityMeasure similarityMeasure + ) { + var map = new HashMap(Map.of(REGION_FIELD, region, MODEL_FIELD, model, PROVIDER_FIELD, provider)); + + if (dimensions != null) { + map.put(ServiceFields.DIMENSIONS, dimensions); + } + + if (dimensionsSetByUser != null) { + map.put(DIMENSIONS_SET_BY_USER, dimensionsSetByUser.equals(Boolean.TRUE)); + } + + if (maxTokens != null) { + map.put(ServiceFields.MAX_INPUT_TOKENS, maxTokens); + } + + if (similarityMeasure != null) { + map.put(SIMILARITY, similarityMeasure.toString()); + } + + return map; + } + + @Override + protected AmazonBedrockEmbeddingsServiceSettings mutateInstanceForVersion( + AmazonBedrockEmbeddingsServiceSettings instance, + TransportVersion version + ) { + return instance; + } + + @Override + protected Writeable.Reader instanceReader() { + return AmazonBedrockEmbeddingsServiceSettings::new; + } + + @Override + protected AmazonBedrockEmbeddingsServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AmazonBedrockEmbeddingsServiceSettings mutateInstance(AmazonBedrockEmbeddingsServiceSettings instance) throws IOException { + return randomValueOtherThan(instance, AmazonBedrockEmbeddingsServiceSettingsTests::createRandom); + } + + private static AmazonBedrockEmbeddingsServiceSettings createRandom() { + return new AmazonBedrockEmbeddingsServiceSettings( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomFrom(AmazonBedrockProvider.values()), + randomFrom(new Integer[] { null, randomNonNegativeInt() }), + randomBoolean(), + randomFrom(new Integer[] { null, randomNonNegativeInt() }), + randomFrom(new SimilarityMeasure[] { null, randomFrom(SimilarityMeasure.values()) }), + RateLimitSettingsTests.createRandom() + ); + } +} From e427f5894ca7f12f790a1971123ca74db7cfefe8 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Fri, 5 Jul 2024 14:49:49 -0400 Subject: [PATCH 208/216] [ES|QL] validate mv_sort order (#110021) * validate mv_sort order --- docs/changelog/110021.yaml | 6 +++ .../function/scalar/multivalue/MvSort.java | 54 +++++++++++++++++-- .../scalar/multivalue/MvSortTests.java | 18 +++++++ .../optimizer/LogicalPlanOptimizerTests.java | 43 +++++++++++++++ 4 files changed, 116 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/110021.yaml diff --git a/docs/changelog/110021.yaml b/docs/changelog/110021.yaml new file mode 100644 index 0000000000000..51878b960dfd0 --- /dev/null +++ b/docs/changelog/110021.yaml @@ -0,0 +1,6 @@ +pr: 110021 +summary: "[ES|QL] validate `mv_sort` order" +area: ES|QL +type: bug +issues: + - 109910 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java index 444c0e319fc6a..199dc49b46097 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSort.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -29,6 +30,7 @@ import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeInt; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeLong; import org.elasticsearch.xpack.esql.capabilities.Validatable; +import org.elasticsearch.xpack.esql.core.common.Failure; import org.elasticsearch.xpack.esql.core.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -64,6 +66,9 @@ public class MvSort extends EsqlScalarFunction implements OptionalArgument, Vali private final Expression field, order; private static final Literal ASC = new Literal(Source.EMPTY, "ASC", DataType.KEYWORD); + private static final Literal DESC = new Literal(Source.EMPTY, "DESC", DataType.KEYWORD); + + private static final String INVALID_ORDER_ERROR = "Invalid order value in [{}], expected one of [{}, {}] but got [{}]"; @FunctionInfo( returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }, @@ -84,7 +89,7 @@ public MvSort( optional = true ) Expression order ) { - super(source, order == null ? Arrays.asList(field, ASC) : Arrays.asList(field, order)); + super(source, order == null ? Arrays.asList(field) : Arrays.asList(field, order)); this.field = field; this.order = order; } @@ -128,6 +133,7 @@ protected TypeResolution resolveType() { if (resolution.unresolved()) { return resolution; } + if (order == null) { return resolution; } @@ -144,10 +150,23 @@ public boolean foldable() { public EvalOperator.ExpressionEvaluator.Factory toEvaluator( Function toEvaluator ) { - Expression nonNullOrder = order == null ? ASC : order; - boolean ordering = nonNullOrder.foldable() && ((BytesRef) nonNullOrder.fold()).utf8ToString().equalsIgnoreCase("DESC") - ? 
false - : true; + boolean ordering = true; + if (isValidOrder() == false) { + throw new IllegalArgumentException( + LoggerMessageFormat.format( + null, + INVALID_ORDER_ERROR, + sourceText(), + ASC.value(), + DESC.value(), + ((BytesRef) order.fold()).utf8ToString() + ) + ); + } + if (order != null && order.foldable()) { + ordering = ((BytesRef) order.fold()).utf8ToString().equalsIgnoreCase((String) ASC.value()); + } + return switch (PlannerUtils.toElementType(field.dataType())) { case BOOLEAN -> new MvSort.EvaluatorFactory( toEvaluator.apply(field), @@ -216,8 +235,33 @@ public DataType dataType() { @Override public void validate(Failures failures) { + if (order == null) { + return; + } String operation = sourceText(); failures.add(isFoldable(order, operation, SECOND)); + if (isValidOrder() == false) { + failures.add( + Failure.fail(order, INVALID_ORDER_ERROR, sourceText(), ASC.value(), DESC.value(), ((BytesRef) order.fold()).utf8ToString()) + ); + } + } + + private boolean isValidOrder() { + boolean isValidOrder = true; + if (order != null && order.foldable()) { + Object obj = order.fold(); + String o = null; + if (obj instanceof BytesRef ob) { + o = ob.utf8ToString(); + } else if (obj instanceof String os) { + o = os; + } + if (o == null || o.equalsIgnoreCase((String) ASC.value()) == false && o.equalsIgnoreCase((String) DESC.value()) == false) { + isValidOrder = false; + } + } + return isValidOrder; } private record EvaluatorFactory( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index a085c0acfa25d..15c81557961f1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -12,7 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; @@ -183,6 +185,22 @@ private static void bytesRefs(List suppliers) { })); } + public void testInvalidOrder() { + String invalidOrder = randomAlphaOfLength(10); + DriverContext driverContext = driverContext(); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> evaluator( + new MvSort( + Source.EMPTY, + field("str", DataType.DATETIME), + new Literal(Source.EMPTY, new BytesRef(invalidOrder), DataType.KEYWORD) + ) + ).get(driverContext) + ); + assertThat(e.getMessage(), equalTo("Invalid order value in [], expected one of [ASC, DESC] but got [" + invalidOrder + "]")); + } + @Override public void testSimpleWithNulls() { assumeFalse("test case is invalid", false); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index ee987f7a5a48a..7ace781652419 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -5477,6 +5477,49 @@ METRICS k8s avg(round(1.05 * rate(network.total_bytes_in))) BY bucket(@timestamp assertThat(Expressions.attribute(values.field()).name(), equalTo("cluster")); } + public void testMvSortInvalidOrder() { + VerificationException e = expectThrows(VerificationException.class, () -> plan(""" + from test + | EVAL sd = mv_sort(salary, "ABC") + """)); + assertTrue(e.getMessage().startsWith("Found ")); + final String header = "Found 1 problem\nline "; + assertEquals( + "2:29: Invalid order value in [mv_sort(salary, \"ABC\")], expected one of [ASC, DESC] but got [ABC]", + e.getMessage().substring(header.length()) + ); + + e = expectThrows(VerificationException.class, () -> plan(""" + from test + | EVAL order = "ABC", sd = mv_sort(salary, order) + """)); + assertTrue(e.getMessage().startsWith("Found ")); + assertEquals( + "2:16: Invalid order value in [mv_sort(salary, order)], expected one of [ASC, DESC] but got [ABC]", + e.getMessage().substring(header.length()) + ); + + e = expectThrows(VerificationException.class, () -> plan(""" + from test + | EVAL order = concat("d", "sc"), sd = mv_sort(salary, order) + """)); + assertTrue(e.getMessage().startsWith("Found ")); + assertEquals( + "2:16: Invalid order value in [mv_sort(salary, order)], expected one of [ASC, DESC] but got [dsc]", + e.getMessage().substring(header.length()) + ); + + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> plan(""" + row v = [1, 2, 3] | EVAL sd = mv_sort(v, "dsc") + """)); + assertEquals("Invalid order value in [mv_sort(v, \"dsc\")], expected one of [ASC, DESC] but got [dsc]", iae.getMessage()); + + iae = expectThrows(IllegalArgumentException.class, () -> plan(""" + row v = [1, 2, 3], o = concat("d", "sc") | EVAL sd = mv_sort(v, o) + """)); + assertEquals("Invalid order value in [mv_sort(v, o)], expected one of [ASC, DESC] but got [dsc]", iae.getMessage()); + } + private Literal nullOf(DataType dataType) { return new Literal(Source.EMPTY, null, dataType); } From 5e096578bd3afe8a23794a9bb6ef1b6ac07fb07f Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 5 Jul 2024 21:28:29 +0200 Subject: [PATCH 209/216] Speedup ES87BloomFilterPostingsFormat.writeBloomFilters (#110289) Acquiring a buffer is rather expensive, and we use a buffer of constant size throughout; let's leverage this fact and avoid contention on the allocator. Also, we can hoist the filter size calculation out of the loop and do the write to the index output without grabbing the file pointer or allocating any bytes in most cases.
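As a rough illustration of the reuse pattern (a minimal sketch, not the patch itself: a plain byte[] stands in for BigArrays' ByteArray, and bloomFilterSize/numBytesForBloomFilter are hypothetical stand-ins for the format's private sizing helpers):

```
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;

class BufferReuseSketch {
    // Hypothetical stand-ins for the sizing helpers in the real postings format.
    static int bloomFilterSize(int maxDoc) { return Math.max(maxDoc * 10, 4096); }
    static int numBytesForBloomFilter(int bloomFilterSize) { return (bloomFilterSize + 7) / 8; }

    static void writeAllFilters(List<int[]> termHashesPerField, int maxDoc, OutputStream out) throws IOException {
        final int bloomFilterSize = bloomFilterSize(maxDoc); // hoisted: identical for every field
        final int numBytes = numBytesForBloomFilter(bloomFilterSize);
        final byte[] buffer = new byte[numBytes];            // acquired once, reused for every field
        for (int[] hashes : termHashesPerField) {
            Arrays.fill(buffer, (byte) 0);                   // reset in place instead of reallocating
            for (int hash : hashes) {
                int bit = Math.floorMod(hash, bloomFilterSize);
                buffer[bit >> 3] |= (byte) (1 << (bit & 7)); // set the corresponding filter bit
            }
            out.write(buffer, 0, numBytes);                  // write straight from the backing array
        }
    }
}
```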
--- .../ES87BloomFilterPostingsFormat.java | 70 ++++++------------- 1 file changed, 23 insertions(+), 47 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java index 191fe8f75b2f0..01d874adec14d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java @@ -128,7 +128,6 @@ final class FieldsWriter extends FieldsConsumer { private final List fieldsGroups = new ArrayList<>(); private final List toCloses = new ArrayList<>(); private boolean closed; - private final int[] hashes = new int[NUM_HASH_FUNCTIONS]; FieldsWriter(SegmentWriteState state) throws IOException { this.state = state; @@ -180,23 +179,24 @@ public Iterator iterator() { } private void writeBloomFilters(Fields fields) throws IOException { - for (String field : fields) { - final Terms terms = fields.terms(field); - if (terms == null) { - continue; - } - final int bloomFilterSize = bloomFilterSize(state.segmentInfo.maxDoc()); - final int numBytes = numBytesForBloomFilter(bloomFilterSize); - try (ByteArray buffer = bigArrays.newByteArray(numBytes)) { + final int bloomFilterSize = bloomFilterSize(state.segmentInfo.maxDoc()); + final int numBytes = numBytesForBloomFilter(bloomFilterSize); + final int[] hashes = new int[NUM_HASH_FUNCTIONS]; + try (ByteArray buffer = bigArrays.newByteArray(numBytes, false)) { + long written = indexOut.getFilePointer(); + for (String field : fields) { + final Terms terms = fields.terms(field); + if (terms == null) { + continue; + } + buffer.fill(0, numBytes, (byte) 0); final TermsEnum termsEnum = terms.iterator(); while (true) { final BytesRef term = termsEnum.next(); if (term == null) { break; } - - hashTerm(term, hashes); - for (int hash : hashes) { + for (int hash : hashTerm(term, hashes)) { hash = hash % bloomFilterSize; final int pos = hash >> 3; final int mask = 1 << (hash & 7); @@ -204,9 +204,13 @@ private void writeBloomFilters(Fields fields) throws IOException { buffer.set(pos, val); } } - bloomFilters.add(new BloomFilter(field, indexOut.getFilePointer(), bloomFilterSize)); - final BytesReference bytes = BytesReference.fromByteArray(buffer, numBytes); - bytes.writeTo(new IndexOutputOutputStream(indexOut)); + bloomFilters.add(new BloomFilter(field, written, bloomFilterSize)); + if (buffer.hasArray()) { + indexOut.writeBytes(buffer.array(), 0, numBytes); + } else { + BytesReference.fromByteArray(buffer, numBytes).writeTo(new IndexOutputOutputStream(indexOut)); + } + written += numBytes; } } } @@ -636,35 +640,10 @@ private MurmurHash3() {} * @param length The length of array * @return The sum of the two 64-bit hashes that make up the hash128 */ - public static long hash64(final byte[] data, final int offset, final int length) { - // We hope that the C2 escape analysis prevents ths allocation from creating GC pressure. - long[] hash128 = { 0, 0 }; - hash128x64Internal(data, offset, length, DEFAULT_SEED, hash128); - return hash128[0]; - } - - /** - * Generates 128-bit hash from the byte array with the given offset, length and seed. - * - *

<p>This is an implementation of the 128-bit hash function {@code MurmurHash3_x64_128} - * from Austin Appleby's original MurmurHash3 {@code c++} code in SMHasher.</p>
    - * - * @param data The input byte array - * @param offset The first element of array - * @param length The length of array - * @param seed The initial seed value - * @return The 128-bit hash (2 longs) - */ @SuppressWarnings("fallthrough") - private static long[] hash128x64Internal( - final byte[] data, - final int offset, - final int length, - final long seed, - final long[] result - ) { - long h1 = seed; - long h2 = seed; + public static long hash64(final byte[] data, final int offset, final int length) { + long h1 = MurmurHash3.DEFAULT_SEED; + long h2 = MurmurHash3.DEFAULT_SEED; final int nblocks = length >> 4; // body @@ -749,11 +728,8 @@ private static long[] hash128x64Internal( h2 = fmix64(h2); h1 += h2; - h2 += h1; - result[0] = h1; - result[1] = h2; - return result; + return h1; } /** From 27e6b37875bd18b980f5721eb02c238e3a6671eb Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Fri, 5 Jul 2024 15:35:47 -0400 Subject: [PATCH 210/216] [ML] Wait for test to finish (#110542) The tests can kick off tasks on another thread. We should wait for those threads to join back before we begin making assertions. Fix #110536 --- muted-tests.yml | 3 - .../TrainedModelAssignmentNodeService.java | 1 + ...rainedModelAssignmentNodeServiceTests.java | 58 ++++++------------- 3 files changed, 20 insertions(+), 42 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 099a48cd34c58..990b7d5dc5130 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -100,9 +100,6 @@ tests: - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT method: test {p0=search.vectors/41_knn_search_half_byte_quantized/Test create, merge, and search cosine} issue: https://github.com/elastic/elasticsearch/issues/109978 -- class: org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentNodeServiceTests - method: testLoadQueuedModelsWhenOneFails - issue: https://github.com/elastic/elasticsearch/issues/110536 # Examples: # diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java index 1ac177be3d594..afd17b803cdcb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java @@ -184,6 +184,7 @@ void stop() { void loadQueuedModels(ActionListener rescheduleImmediately) { if (stopped) { + rescheduleImmediately.onResponse(false); return; } if (latestState != null) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java index f8f699b86966d..a5bba21d9e778 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.support.SubscribableListener; import 
org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -50,13 +49,12 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiConsumer; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.ml.MachineLearning.UTILITY_THREAD_POOL_NAME; import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentClusterServiceTests.shutdownMetadata; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -122,41 +120,20 @@ private void loadQueuedModels(TrainedModelAssignmentNodeService trainedModelAssi loadQueuedModels(trainedModelAssignmentNodeService, false); } - private void loadQueuedModels(TrainedModelAssignmentNodeService trainedModelAssignmentNodeService, boolean expectedRunImmediately) { - trainedModelAssignmentNodeService.loadQueuedModels(ActionListener.wrap(actualRunImmediately -> { - assertThat( - "We should rerun immediately if there are still model loading tasks to process.", - actualRunImmediately, - equalTo(expectedRunImmediately) - ); - }, e -> fail("We should never call the onFailure method of this listener."))); - } - - private void loadQueuedModels(TrainedModelAssignmentNodeService trainedModelAssignmentNodeService, int times) + private void loadQueuedModels(TrainedModelAssignmentNodeService trainedModelAssignmentNodeService, boolean expectedRunImmediately) throws InterruptedException { - var modelQueueSize = new AtomicInteger(times); - BiConsumer, Boolean> verifyRerunningImmediately = (listener, result) -> { - var runImmediately = modelQueueSize.decrementAndGet() > 0; - assertThat( - "We should rerun immediately if there are still model loading tasks to process. 
Models remaining: " + modelQueueSize.get(), - result, - is(runImmediately) - ); - listener.onResponse(null); - }; - - var chain = SubscribableListener.newForked( - l -> trainedModelAssignmentNodeService.loadQueuedModels(l.delegateFailure(verifyRerunningImmediately)) - ); - for (int i = 1; i < times; i++) { - chain = chain.andThen( - (l, r) -> trainedModelAssignmentNodeService.loadQueuedModels(l.delegateFailure(verifyRerunningImmediately)) - ); - } - var latch = new CountDownLatch(1); - chain.addListener(ActionListener.running(latch::countDown)); + var actual = new AtomicReference(); // AtomicReference for nullable + trainedModelAssignmentNodeService.loadQueuedModels( + ActionListener.runAfter(ActionListener.wrap(actual::set, e -> {}), latch::countDown) + ); assertTrue("Timed out waiting for loadQueuedModels to finish.", latch.await(10, TimeUnit.SECONDS)); + assertThat("Test failed to call the onResponse handler.", actual.get(), notNullValue()); + assertThat( + "We should rerun immediately if there are still model loading tasks to process.", + actual.get(), + equalTo(expectedRunImmediately) + ); } public void testLoadQueuedModels() throws InterruptedException { @@ -237,7 +214,7 @@ public void testLoadQueuedModelsWhenFailureIsRetried() throws InterruptedExcepti verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService); } - public void testLoadQueuedModelsWhenStopped() { + public void testLoadQueuedModelsWhenStopped() throws InterruptedException { TrainedModelAssignmentNodeService trainedModelAssignmentNodeService = createService(); // When there are no queued models @@ -247,8 +224,11 @@ public void testLoadQueuedModelsWhenStopped() { trainedModelAssignmentNodeService.prepareModelToLoad(newParams(modelToLoad, modelToLoad)); trainedModelAssignmentNodeService.stop(); - trainedModelAssignmentNodeService.loadQueuedModels( - ActionListener.running(() -> fail("When stopped, then loadQueuedModels should never run.")) + var latch = new CountDownLatch(1); + trainedModelAssignmentNodeService.loadQueuedModels(ActionListener.running(latch::countDown)); + assertTrue( + "loadQueuedModels should immediately call the listener without forking to another thread.", + latch.await(0, TimeUnit.SECONDS) ); verifyNoMoreInteractions(deploymentManager, trainedModelAssignmentService); } From 81f95b97b4bdd0176fcc8c21ee93457241755f2c Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 5 Jul 2024 12:55:37 -0700 Subject: [PATCH 211/216] Introduce compute listener (#110400) Currently, if a child request fails, we automatically trigger cancellation for ES|QL requests. This can result in TaskCancelledException being collected by the RefCountingListener first, which then returns that exception to the caller. For example, if we encounter a CircuitBreakingException (429), we might incorrectly return a TaskCancelledException (400) instead. This change introduces the ComputeListener, a variant of RefCountingListener, which selects the most appropriate exception to return to the caller. I also integrated the following features into ComputeListener to simplify ComputeService: - Automatic cancellation of sub-tasks on failure. - Collection of profiles from sub-tasks. - Collection of response headers from sub-tasks. 
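As a rough illustration of the exception-priority rule (a minimal sketch, not the actual ComputeListener/FailureCollector code; pick is a hypothetical helper):

```
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.tasks.TaskCancelledException;

class FailurePrioritySketch {
    // Keep the first non-cancelled failure if one exists; fall back to a cancellation otherwise.
    static Exception pick(Exception current, Exception incoming) {
        if (current == null) {
            return incoming;
        }
        boolean currentCancelled = ExceptionsHelper.unwrap(current, TaskCancelledException.class) != null;
        boolean incomingCancelled = ExceptionsHelper.unwrap(incoming, TaskCancelledException.class) != null;
        if (currentCancelled && incomingCancelled == false) {
            return incoming; // a concrete failure (e.g. CircuitBreakingException) beats a cancellation
        }
        return current;      // otherwise the first collected failure wins
    }
}
```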
--- docs/changelog/110400.yaml | 5 + .../compute/operator/AsyncOperator.java | 31 +-- .../compute/operator/DriverRunner.java | 25 +- .../compute/operator/FailureCollector.java | 112 ++++++++ .../exchange/ExchangeSourceHandler.java | 33 +-- .../operator/FailureCollectorTests.java | 90 +++++++ .../xpack/esql/plugin/ComputeListener.java | 90 +++++++ .../xpack/esql/plugin/ComputeService.java | 255 +++++++----------- .../esql/plugin/ComputeListenerTests.java | 246 +++++++++++++++++ 9 files changed, 657 insertions(+), 230 deletions(-) create mode 100644 docs/changelog/110400.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java diff --git a/docs/changelog/110400.yaml b/docs/changelog/110400.yaml new file mode 100644 index 0000000000000..f2810eba214f1 --- /dev/null +++ b/docs/changelog/110400.yaml @@ -0,0 +1,5 @@ +pr: 110400 +summary: Introduce compute listener +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java index 061cefc86bed0..0fed88370a144 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java @@ -21,13 +21,11 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.seqno.LocalCheckpointTracker; import org.elasticsearch.index.seqno.SequenceNumbers; -import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Map; import java.util.Objects; -import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.LongAdder; /** @@ -40,7 +38,7 @@ public abstract class AsyncOperator implements Operator { private volatile SubscribableListener blockedFuture; private final Map buffers = ConcurrentCollections.newConcurrentMap(); - private final AtomicReference failure = new AtomicReference<>(); + private final FailureCollector failureCollector = new FailureCollector(); private final DriverContext driverContext; private final int maxOutstandingRequests; @@ -77,7 +75,7 @@ public boolean needsInput() { @Override public void addInput(Page input) { - if (failure.get() != null) { + if (failureCollector.hasFailure()) { input.releaseBlocks(); return; } @@ -90,7 +88,7 @@ public void addInput(Page input) { onSeqNoCompleted(seqNo); }, e -> { releasePageOnAnyThread(input); - onFailure(e); + failureCollector.unwrapAndCollect(e); onSeqNoCompleted(seqNo); }); final long startNanos = System.nanoTime(); @@ -121,31 +119,12 @@ private void releasePageOnAnyThread(Page page) { protected abstract void doClose(); - private void onFailure(Exception e) { - failure.getAndUpdate(first -> { - if (first == null) { - return e; - } - // ignore subsequent TaskCancelledException exceptions as they don't provide useful info. 
- if (ExceptionsHelper.unwrap(e, TaskCancelledException.class) != null) { - return first; - } - if (ExceptionsHelper.unwrap(first, TaskCancelledException.class) != null) { - return e; - } - if (ExceptionsHelper.unwrapCause(first) != ExceptionsHelper.unwrapCause(e)) { - first.addSuppressed(e); - } - return first; - }); - } - private void onSeqNoCompleted(long seqNo) { checkpoint.markSeqNoAsProcessed(seqNo); if (checkpoint.getPersistedCheckpoint() < checkpoint.getProcessedCheckpoint()) { notifyIfBlocked(); } - if (closed || failure.get() != null) { + if (closed || failureCollector.hasFailure()) { discardPages(); } } @@ -164,7 +143,7 @@ private void notifyIfBlocked() { } private void checkFailure() { - Exception e = failure.get(); + Exception e = failureCollector.getFailure(); if (e != null) { discardPages(); throw ExceptionsHelper.convertToElastic(e); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java index 5de017fbd279e..b427a36566f11 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java @@ -7,14 +7,11 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.tasks.TaskCancelledException; import java.util.List; -import java.util.concurrent.atomic.AtomicReference; /** * Run a set of drivers to completion. @@ -35,8 +32,8 @@ public DriverRunner(ThreadContext threadContext) { * Run all drivers to completion asynchronously. 
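* If any driver fails, the remaining drivers are cancelled and the first non-cancellation exception collected is reported to the listener.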
*/ public void runToCompletion(List drivers, ActionListener listener) { - AtomicReference failure = new AtomicReference<>(); var responseHeadersCollector = new ResponseHeadersCollector(threadContext); + var failure = new FailureCollector(); CountDown counter = new CountDown(drivers.size()); for (int i = 0; i < drivers.size(); i++) { Driver driver = drivers.get(i); @@ -48,23 +45,7 @@ public void onResponse(Void unused) { @Override public void onFailure(Exception e) { - failure.getAndUpdate(first -> { - if (first == null) { - return e; - } - if (ExceptionsHelper.unwrap(e, TaskCancelledException.class) != null) { - return first; - } else { - if (ExceptionsHelper.unwrap(first, TaskCancelledException.class) != null) { - return e; - } else { - if (first != e) { - first.addSuppressed(e); - } - return first; - } - } - }); + failure.unwrapAndCollect(e); for (Driver d : drivers) { if (driver != d) { d.cancel("Driver [" + driver.sessionId() + "] was cancelled or failed"); @@ -77,7 +58,7 @@ private void done() { responseHeadersCollector.collect(); if (counter.countDown()) { responseHeadersCollector.finish(); - Exception error = failure.get(); + Exception error = failure.getFailure(); if (error != null) { listener.onFailure(error); } else { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java new file mode 100644 index 0000000000000..99edab038af31 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/FailureCollector.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.transport.TransportException; + +import java.util.List; +import java.util.Queue; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * {@code FailureCollector} is responsible for collecting exceptions that occur in the compute engine. + * The collected exceptions are categorized into task-cancelled and non-task-cancelled exceptions. + * To limit memory usage, this class collects only the first 10 exceptions in each category by default. + * When returning the accumulated failure to the caller, this class prefers non-task-cancelled exceptions + * over task-cancelled ones as they are more useful for diagnosing issues. 
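+ * <p>Typical usage: concurrent workers report errors via {@code unwrapAndCollect}, and the coordinating code calls {@code getFailure()} after all workers complete to surface the most informative exception.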
+ */ +public final class FailureCollector { + private final Queue cancelledExceptions = ConcurrentCollections.newQueue(); + private final AtomicInteger cancelledExceptionsCount = new AtomicInteger(); + + private final Queue nonCancelledExceptions = ConcurrentCollections.newQueue(); + private final AtomicInteger nonCancelledExceptionsCount = new AtomicInteger(); + + private final int maxExceptions; + private volatile boolean hasFailure = false; + private Exception finalFailure = null; + + public FailureCollector() { + this(10); + } + + public FailureCollector(int maxExceptions) { + if (maxExceptions <= 0) { + throw new IllegalArgumentException("maxExceptions must be at least one"); + } + this.maxExceptions = maxExceptions; + } + + public void unwrapAndCollect(Exception originEx) { + final Exception e = originEx instanceof TransportException + ? (originEx.getCause() instanceof Exception cause ? cause : new ElasticsearchException(originEx.getCause())) + : originEx; + if (ExceptionsHelper.unwrap(e, TaskCancelledException.class) != null) { + if (cancelledExceptionsCount.incrementAndGet() <= maxExceptions) { + cancelledExceptions.add(e); + } + } else { + if (nonCancelledExceptionsCount.incrementAndGet() <= maxExceptions) { + nonCancelledExceptions.add(e); + } + } + hasFailure = true; + } + + /** + * @return {@code true} if any failure has been collected, {@code false} otherwise + */ + public boolean hasFailure() { + return hasFailure; + } + + /** + * Returns the accumulated failure, preferring non-task-cancelled exceptions over task-cancelled ones. + * Once this method builds the failure, incoming failures are discarded. + * + * @return the accumulated failure, or {@code null} if no failure has been collected + */ + public Exception getFailure() { + if (hasFailure == false) { + return null; + } + synchronized (this) { + if (finalFailure == null) { + finalFailure = buildFailure(); + } + return finalFailure; + } + } + + private Exception buildFailure() { + assert hasFailure; + assert Thread.holdsLock(this); + int total = 0; + Exception first = null; + for (var exceptions : List.of(nonCancelledExceptions, cancelledExceptions)) { + for (Exception e : exceptions) { + if (first == null) { + first = e; + total++; + } else if (first != e) { + first.addSuppressed(e); + total++; + } + if (total >= maxExceptions) { + return first; + } + } + } + assert first != null; + return first; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index adce8d8a88407..77b535949eb9d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -7,21 +7,18 @@ package org.elasticsearch.compute.operator.exchange; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.FailureCollector; import org.elasticsearch.core.Releasable; -import org.elasticsearch.tasks.TaskCancelledException; -import 
org.elasticsearch.transport.TransportException; import java.util.List; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; /** * An {@link ExchangeSourceHandler} asynchronously fetches pages and status from multiple {@link RemoteSink}s @@ -37,7 +34,7 @@ public final class ExchangeSourceHandler { private final PendingInstances outstandingSinks; private final PendingInstances outstandingSources; - private final AtomicReference failure = new AtomicReference<>(); + private final FailureCollector failure = new FailureCollector(); public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor) { this.buffer = new ExchangeBuffer(maxBufferSize); @@ -54,7 +51,7 @@ private class ExchangeSourceImpl implements ExchangeSource { } private void checkFailure() { - Exception e = failure.get(); + Exception e = failure.getFailure(); if (e != null) { throw ExceptionsHelper.convertToElastic(e); } @@ -172,7 +169,7 @@ void fetchPage() { while (loopControl.isRunning()) { loopControl.exiting(); // finish other sinks if one of them failed or source no longer need pages. - boolean toFinishSinks = buffer.noMoreInputs() || failure.get() != null; + boolean toFinishSinks = buffer.noMoreInputs() || failure.hasFailure(); remoteSink.fetchPageAsync(toFinishSinks, ActionListener.wrap(resp -> { Page page = resp.takePage(); if (page != null) { @@ -199,26 +196,8 @@ void fetchPage() { loopControl.exited(); } - void onSinkFailed(Exception originEx) { - final Exception e = originEx instanceof TransportException - ? (originEx.getCause() instanceof Exception cause ? cause : new ElasticsearchException(originEx.getCause())) - : originEx; - failure.getAndUpdate(first -> { - if (first == null) { - return e; - } - // ignore subsequent TaskCancelledException exceptions as they don't provide useful info. - if (ExceptionsHelper.unwrap(e, TaskCancelledException.class) != null) { - return first; - } - if (ExceptionsHelper.unwrap(first, TaskCancelledException.class) != null) { - return e; - } - if (ExceptionsHelper.unwrapCause(first) != ExceptionsHelper.unwrapCause(e)) { - first.addSuppressed(e); - } - return first; - }); + void onSinkFailed(Exception e) { + failure.unwrapAndCollect(e); buffer.waitForReading().onResponse(null); // resume the Driver if it is being blocked on reading onSinkComplete(); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java new file mode 100644 index 0000000000000..d5fa0a1eaecc9 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FailureCollectorTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.RemoteTransportException; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.lessThan; + +public class FailureCollectorTests extends ESTestCase { + + public void testCollect() throws Exception { + int maxExceptions = between(1, 100); + FailureCollector collector = new FailureCollector(maxExceptions); + List cancelledExceptions = List.of( + new TaskCancelledException("user request"), + new TaskCancelledException("cross "), + new TaskCancelledException("on failure") + ); + List nonCancelledExceptions = List.of( + new IOException("i/o simulated"), + new IOException("disk broken"), + new CircuitBreakingException("low memory", CircuitBreaker.Durability.TRANSIENT), + new CircuitBreakingException("over limit", CircuitBreaker.Durability.TRANSIENT) + ); + List failures = Stream.concat( + IntStream.range(0, between(1, 500)).mapToObj(n -> randomFrom(cancelledExceptions)), + IntStream.range(0, between(1, 500)).mapToObj(n -> randomFrom(nonCancelledExceptions)) + ).collect(Collectors.toList()); + Randomness.shuffle(failures); + Queue queue = new ConcurrentLinkedQueue<>(failures); + Thread[] threads = new Thread[between(1, 4)]; + CyclicBarrier carrier = new CyclicBarrier(threads.length); + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + try { + carrier.await(10, TimeUnit.SECONDS); + } catch (Exception e) { + throw new AssertionError(e); + } + Exception ex; + while ((ex = queue.poll()) != null) { + if (randomBoolean()) { + collector.unwrapAndCollect(ex); + } else { + collector.unwrapAndCollect(new RemoteTransportException("disconnect", ex)); + } + if (randomBoolean()) { + assertTrue(collector.hasFailure()); + } + } + }); + threads[i].start(); + } + for (Thread thread : threads) { + thread.join(); + } + assertTrue(collector.hasFailure()); + Exception failure = collector.getFailure(); + assertNotNull(failure); + assertThat(failure, Matchers.in(nonCancelledExceptions)); + assertThat(failure.getSuppressed().length, lessThan(maxExceptions)); + } + + public void testEmpty() { + FailureCollector collector = new FailureCollector(5); + assertFalse(collector.hasFailure()); + assertNull(collector.getFailure()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java new file mode 100644 index 0000000000000..f8f35bb6f0b4f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeListener.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.compute.operator.FailureCollector; +import org.elasticsearch.compute.operator.ResponseHeadersCollector; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.transport.TransportService; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * A variant of {@link RefCountingListener} with the following differences: + * 1. Automatically cancels sub tasks on failure. + * 2. Collects driver profiles from sub tasks. + * 3. Collects response headers from sub tasks, specifically warnings emitted during compute + * 4. Collects failures and returns the most appropriate exception to the caller. + */ +final class ComputeListener implements Releasable { + private static final Logger LOGGER = LogManager.getLogger(ComputeService.class); + + private final RefCountingListener refs; + private final FailureCollector failureCollector = new FailureCollector(); + private final AtomicBoolean cancelled = new AtomicBoolean(); + private final CancellableTask task; + private final TransportService transportService; + private final List collectedProfiles; + private final ResponseHeadersCollector responseHeaders; + + ComputeListener(TransportService transportService, CancellableTask task, ActionListener delegate) { + this.transportService = transportService; + this.task = task; + this.responseHeaders = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); + this.collectedProfiles = Collections.synchronizedList(new ArrayList<>()); + this.refs = new RefCountingListener(1, ActionListener.wrap(ignored -> { + responseHeaders.finish(); + var result = new ComputeResponse(collectedProfiles.isEmpty() ? List.of() : collectedProfiles.stream().toList()); + delegate.onResponse(result); + }, e -> delegate.onFailure(failureCollector.getFailure()))); + } + + /** + * Acquires a new listener that doesn't collect result + */ + ActionListener acquireAvoid() { + return refs.acquire().delegateResponse((l, e) -> { + failureCollector.unwrapAndCollect(e); + try { + if (cancelled.compareAndSet(false, true)) { + LOGGER.debug("cancelling ESQL task {} on failure", task); + transportService.getTaskManager().cancelTaskAndDescendants(task, "cancelled on failure", false, ActionListener.noop()); + } + } finally { + l.onFailure(e); + } + }); + } + + /** + * Acquires a new listener that collects compute result. 
This listener will also collect warnings emitted during compute + */ + ActionListener<ComputeResponse> acquireCompute() { + return acquireAvoid().map(resp -> { + responseHeaders.collect(); + if (resp != null && resp.getProfiles().isEmpty() == false) { + collectedProfiles.addAll(resp.getProfiles()); + } + return null; + }); + } + + @Override + public void close() { + refs.close(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index e28c8e8434643..673e320e5106b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -27,9 +27,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; -import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverTaskRunner; -import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.compute.operator.exchange.ExchangeSink; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; @@ -82,7 +80,6 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; @@ -171,13 +168,16 @@ public void execute( null, null ); - runCompute( - rootTask, - computeContext, - coordinatorPlan, - listener.map(driverProfiles -> new Result(physicalPlan.output(), collectedPages, driverProfiles)) - ); - return; + try ( + var computeListener = new ComputeListener( + transportService, + rootTask, + listener.map(r -> new Result(physicalPlan.output(), collectedPages, r.getProfiles())) + ) + ) { + runCompute(rootTask, computeContext, coordinatorPlan, computeListener.acquireCompute()); + return; + } } else { if (clusterToConcreteIndices.values().stream().allMatch(v -> v.indices().length == 0)) { var error = "expected concrete indices with data node plan but got empty; data node plan " + dataNodePlan; @@ -190,33 +190,25 @@ public void execute( .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planOriginalIndices(physicalPlan)); var localOriginalIndices = clusterToOriginalIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); var localConcreteIndices = clusterToConcreteIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); - final var responseHeadersCollector = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); - listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); - final AtomicBoolean cancelled = new AtomicBoolean(); - final List<DriverProfile> collectedProfiles = configuration.profile() ?
Collections.synchronizedList(new ArrayList<>()) : List.of(); final var exchangeSource = new ExchangeSourceHandler( queryPragmas.exchangeBufferSize(), transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) ); try ( Releasable ignored = exchangeSource.addEmptySink(); - RefCountingListener refs = new RefCountingListener( - listener.map(unused -> new Result(physicalPlan.output(), collectedPages, collectedProfiles)) + var computeListener = new ComputeListener( + transportService, + rootTask, + listener.map(r -> new Result(physicalPlan.output(), collectedPages, r.getProfiles())) ) ) { // run compute on the coordinator - exchangeSource.addCompletionListener(refs.acquire()); + exchangeSource.addCompletionListener(computeListener.acquireAvoid()); runCompute( rootTask, new ComputeContext(sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), coordinatorPlan, - cancelOnFailure(rootTask, cancelled, refs.acquire()).map(driverProfiles -> { - responseHeadersCollector.collect(); - if (configuration.profile()) { - collectedProfiles.addAll(driverProfiles); - } - return null; - }) + computeListener.acquireCompute() ); // starts computes on data nodes on the main cluster if (localConcreteIndices != null && localConcreteIndices.indices().length > 0) { @@ -229,17 +221,10 @@ public void execute( Set.of(localConcreteIndices.indices()), localOriginalIndices.indices(), exchangeSource, - ActionListener.releaseAfter(refs.acquire(), exchangeSource.addEmptySink()), - () -> cancelOnFailure(rootTask, cancelled, refs.acquire()).map(response -> { - responseHeadersCollector.collect(); - if (configuration.profile()) { - collectedProfiles.addAll(response.getProfiles()); - } - return null; - }) + computeListener ); } - // starts computes on remote cluster + // starts computes on remote clusters startComputeOnRemoteClusters( sessionId, rootTask, @@ -247,13 +232,7 @@ public void execute( dataNodePlan, exchangeSource, getRemoteClusters(clusterToConcreteIndices, clusterToOriginalIndices), - () -> cancelOnFailure(rootTask, cancelled, refs.acquire()).map(response -> { - responseHeadersCollector.collect(); - if (configuration.profile()) { - collectedProfiles.addAll(response.getProfiles()); - } - return null; - }) + computeListener ); } } @@ -289,8 +268,7 @@ private void startComputeOnDataNodes( Set concreteIndices, String[] originalIndices, ExchangeSourceHandler exchangeSource, - ActionListener parentListener, - Supplier> dataNodeListenerSupplier + ComputeListener computeListener ) { var planWithReducer = configuration.pragmas().nodeLevelReduction() == false ? dataNodePlan @@ -304,12 +282,12 @@ private void startComputeOnDataNodes( // Since it's used only for @timestamp, it is relatively safe to assume it's not needed // but it would be better to have a proper impl. QueryBuilder requestFilter = PlannerUtils.requestFilter(planWithReducer, x -> true); + var lookupListener = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); lookupDataNodes(parentTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(dataNodes -> { - try (RefCountingRunnable refs = new RefCountingRunnable(() -> parentListener.onResponse(null))) { + try (RefCountingListener refs = new RefCountingListener(lookupListener)) { // For each target node, first open a remote exchange on the remote node, then link the exchange source to // the new remote exchange sink, and initialize the computation on the target node via data-node-request. 
for (DataNode node : dataNodes) { - var dataNodeListener = ActionListener.releaseAfter(dataNodeListenerSupplier.get(), refs.acquire()); var queryPragmas = configuration.pragmas(); ExchangeService.openExchange( transportService, @@ -317,9 +295,10 @@ private void startComputeOnDataNodes( sessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, - dataNodeListener.delegateFailureAndWrap((delegate, unused) -> { + refs.acquire().delegateFailureAndWrap((l, unused) -> { var remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, node.connection); exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); + var dataNodeListener = ActionListener.runBefore(computeListener.acquireCompute(), () -> l.onResponse(null)); transportService.sendChildRequest( node.connection, DATA_ACTION_NAME, @@ -333,13 +312,13 @@ private void startComputeOnDataNodes( ), parentTask, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(delegate, ComputeResponse::new, esqlExecutor) + new ActionListenerResponseHandler<>(dataNodeListener, ComputeResponse::new, esqlExecutor) ); }) ); } } - }, parentListener::onFailure)); + }, lookupListener::onFailure)); } private void startComputeOnRemoteClusters( @@ -349,19 +328,19 @@ private void startComputeOnRemoteClusters( PhysicalPlan plan, ExchangeSourceHandler exchangeSource, List clusters, - Supplier> listener + ComputeListener computeListener ) { - try (RefCountingRunnable refs = new RefCountingRunnable(exchangeSource.addEmptySink()::close)) { + var queryPragmas = configuration.pragmas(); + var linkExchangeListeners = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); + try (RefCountingListener refs = new RefCountingListener(linkExchangeListeners)) { for (RemoteCluster cluster : clusters) { - var targetNodeListener = ActionListener.releaseAfter(listener.get(), refs.acquire()); - var queryPragmas = configuration.pragmas(); ExchangeService.openExchange( transportService, cluster.connection, sessionId, queryPragmas.exchangeBufferSize(), esqlExecutor, - targetNodeListener.delegateFailureAndWrap((l, unused) -> { + refs.acquire().delegateFailureAndWrap((l, unused) -> { var remoteSink = exchangeService.newRemoteSink(rootTask, sessionId, transportService, cluster.connection); exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); var clusterRequest = new ClusterComputeRequest( @@ -372,13 +351,14 @@ private void startComputeOnRemoteClusters( cluster.concreteIndices, cluster.originalIndices ); + var clusterListener = ActionListener.runBefore(computeListener.acquireCompute(), () -> l.onResponse(null)); transportService.sendChildRequest( cluster.connection, CLUSTER_ACTION_NAME, clusterRequest, rootTask, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(l, ComputeResponse::new, esqlExecutor) + new ActionListenerResponseHandler<>(clusterListener, ComputeResponse::new, esqlExecutor) ); }) ); @@ -386,17 +366,7 @@ private void startComputeOnRemoteClusters( } } - private ActionListener cancelOnFailure(CancellableTask task, AtomicBoolean cancelled, ActionListener listener) { - return listener.delegateResponse((l, e) -> { - l.onFailure(e); - if (cancelled.compareAndSet(false, true)) { - LOGGER.debug("cancelling ESQL task {} on failure", task); - transportService.getTaskManager().cancelTaskAndDescendants(task, "cancelled", false, ActionListener.noop()); - } - }); - } - - void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan 
plan, ActionListener> listener) { + void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener listener) { listener = ActionListener.runBefore(listener, () -> Releasables.close(context.searchContexts)); List contexts = new ArrayList<>(context.searchContexts.size()); for (int i = 0; i < context.searchContexts.size(); i++) { @@ -446,9 +416,10 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, } ActionListener listenerCollectingStatus = listener.map(ignored -> { if (context.configuration.profile()) { - return drivers.stream().map(Driver::profile).toList(); + return new ComputeResponse(drivers.stream().map(Driver::profile).toList()); + } else { + return new ComputeResponse(List.of()); } - return null; }); listenerCollectingStatus = ActionListener.releaseAfter(listenerCollectingStatus, () -> Releasables.close(drivers)); driverRunner.executeDrivers( @@ -613,8 +584,7 @@ private class DataNodeRequestExecutor { private final DataNodeRequest request; private final CancellableTask parentTask; private final ExchangeSinkHandler exchangeSink; - private final ActionListener listener; - private final List driverProfiles; + private final ComputeListener computeListener; private final int maxConcurrentShards; private final ExchangeSink blockingSink; // block until we have completed on all shards or the coordinator has enough data @@ -623,14 +593,12 @@ private class DataNodeRequestExecutor { CancellableTask parentTask, ExchangeSinkHandler exchangeSink, int maxConcurrentShards, - List driverProfiles, - ActionListener listener + ComputeListener computeListener ) { this.request = request; this.parentTask = parentTask; this.exchangeSink = exchangeSink; - this.listener = listener; - this.driverProfiles = driverProfiles; + this.computeListener = computeListener; this.maxConcurrentShards = maxConcurrentShards; this.blockingSink = exchangeSink.createExchangeSink(); } @@ -648,40 +616,46 @@ private void runBatch(int startBatchIndex) { final var sessionId = request.sessionId(); final int endBatchIndex = Math.min(startBatchIndex + maxConcurrentShards, request.shardIds().size()); List shardIds = request.shardIds().subList(startBatchIndex, endBatchIndex); + ActionListener batchListener = new ActionListener<>() { + final ActionListener ref = computeListener.acquireCompute(); + + @Override + public void onResponse(ComputeResponse result) { + try { + onBatchCompleted(endBatchIndex); + } finally { + ref.onResponse(result); + } + } + + @Override + public void onFailure(Exception e) { + try { + exchangeService.finishSinkHandler(request.sessionId(), e); + } finally { + ref.onFailure(e); + } + } + }; acquireSearchContexts(clusterAlias, shardIds, configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH, ESQL_WORKER_THREAD_POOL_NAME); var computeContext = new ComputeContext(sessionId, clusterAlias, searchContexts, configuration, null, exchangeSink); - runCompute( - parentTask, - computeContext, - request.plan(), - ActionListener.wrap(profiles -> onBatchCompleted(endBatchIndex, profiles), this::onFailure) - ); - }, this::onFailure)); + runCompute(parentTask, computeContext, request.plan(), batchListener); + }, batchListener::onFailure)); } - private void onBatchCompleted(int lastBatchIndex, List batchProfiles) { - if (request.configuration().profile()) { - driverProfiles.addAll(batchProfiles); - } + private void onBatchCompleted(int lastBatchIndex) { if (lastBatchIndex < 
request.shardIds().size() && exchangeSink.isFinished() == false) { runBatch(lastBatchIndex); } else { - blockingSink.finish(); // don't return until all pages are fetched + var completionListener = computeListener.acquireAvoid(); exchangeSink.addCompletionListener( - ContextPreservingActionListener.wrapPreservingContext( - ActionListener.runBefore(listener, () -> exchangeService.finishSinkHandler(request.sessionId(), null)), - transportService.getThreadPool().getThreadContext() - ) + ActionListener.runAfter(completionListener, () -> exchangeService.finishSinkHandler(request.sessionId(), null)) ); + blockingSink.finish(); } } - - private void onFailure(Exception e) { - exchangeService.finishSinkHandler(request.sessionId(), e); - listener.onFailure(e); - } } private void runComputeOnDataNode( @@ -689,17 +663,10 @@ private void runComputeOnDataNode( String externalId, PhysicalPlan reducePlan, DataNodeRequest request, - ActionListener listener + ComputeListener computeListener ) { - final List collectedProfiles = request.configuration().profile() - ? Collections.synchronizedList(new ArrayList<>()) - : List.of(); - final var responseHeadersCollector = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); - final RefCountingListener listenerRefs = new RefCountingListener( - ActionListener.runBefore(listener.map(unused -> new ComputeResponse(collectedProfiles)), responseHeadersCollector::finish) - ); + var parentListener = computeListener.acquireAvoid(); try { - final AtomicBoolean cancelled = new AtomicBoolean(); // run compute with target shards var internalSink = exchangeService.createSinkHandler(request.sessionId(), request.pragmas().exchangeBufferSize()); DataNodeRequestExecutor dataNodeRequestExecutor = new DataNodeRequestExecutor( @@ -707,17 +674,16 @@ private void runComputeOnDataNode( task, internalSink, request.configuration().pragmas().maxConcurrentShardsPerNode(), - collectedProfiles, - ActionListener.runBefore(cancelOnFailure(task, cancelled, listenerRefs.acquire()), responseHeadersCollector::collect) + computeListener ); dataNodeRequestExecutor.start(); // run the node-level reduction var externalSink = exchangeService.getSinkHandler(externalId); task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); - exchangeSource.addCompletionListener(listenerRefs.acquire()); + exchangeSource.addCompletionListener(computeListener.acquireAvoid()); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); - ActionListener reductionListener = cancelOnFailure(task, cancelled, listenerRefs.acquire()); + ActionListener reductionListener = computeListener.acquireCompute(); runCompute( task, new ComputeContext( @@ -729,26 +695,22 @@ private void runComputeOnDataNode( externalSink ), reducePlan, - ActionListener.wrap(driverProfiles -> { - responseHeadersCollector.collect(); - if (request.configuration().profile()) { - collectedProfiles.addAll(driverProfiles); - } + ActionListener.wrap(resp -> { // don't return until all pages are fetched - externalSink.addCompletionListener( - ActionListener.runBefore(reductionListener, () -> exchangeService.finishSinkHandler(externalId, null)) - ); + externalSink.addCompletionListener(ActionListener.running(() -> { + exchangeService.finishSinkHandler(externalId, null); + reductionListener.onResponse(resp); + })); }, e -> { exchangeService.finishSinkHandler(externalId, e); reductionListener.onFailure(e); 
}) ); + parentListener.onResponse(null); } catch (Exception e) { exchangeService.finishSinkHandler(externalId, e); exchangeService.finishSinkHandler(request.sessionId(), e); - listenerRefs.acquire().onFailure(e); - } finally { - listenerRefs.close(); + parentListener.onFailure(e); } } @@ -785,7 +747,9 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T request.aliasFilters(), request.plan() ); - runComputeOnDataNode((CancellableTask) task, sessionId, reducePlan, request, listener); + try (var computeListener = new ComputeListener(transportService, (CancellableTask) task, listener)) { + runComputeOnDataNode((CancellableTask) task, sessionId, reducePlan, request, computeListener); + } } } @@ -799,16 +763,18 @@ public void messageReceived(ClusterComputeRequest request, TransportChannel chan listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); return; } - runComputeOnRemoteCluster( - request.clusterAlias(), - request.sessionId(), - (CancellableTask) task, - request.configuration(), - (ExchangeSinkExec) request.plan(), - Set.of(request.indices()), - request.originalIndices(), - listener - ); + try (var computeListener = new ComputeListener(transportService, (CancellableTask) task, listener)) { + runComputeOnRemoteCluster( + request.clusterAlias(), + request.sessionId(), + (CancellableTask) task, + request.configuration(), + (ExchangeSinkExec) request.plan(), + Set.of(request.indices()), + request.originalIndices(), + computeListener + ); + } } } @@ -829,28 +795,20 @@ void runComputeOnRemoteCluster( ExchangeSinkExec plan, Set concreteIndices, String[] originalIndices, - ActionListener listener + ComputeListener computeListener ) { final var exchangeSink = exchangeService.getSinkHandler(globalSessionId); parentTask.addListener( () -> exchangeService.finishSinkHandler(globalSessionId, new TaskCancelledException(parentTask.getReasonCancelled())) ); - ThreadPool threadPool = transportService.getThreadPool(); - final var responseHeadersCollector = new ResponseHeadersCollector(threadPool.getThreadContext()); - listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); - final AtomicBoolean cancelled = new AtomicBoolean(); - final List collectedProfiles = configuration.profile() ? 
Collections.synchronizedList(new ArrayList<>()) : List.of(); final String localSessionId = clusterAlias + ":" + globalSessionId; var exchangeSource = new ExchangeSourceHandler( configuration.pragmas().exchangeBufferSize(), transportService.getThreadPool().executor(ThreadPool.Names.SEARCH) ); - try ( - Releasable ignored = exchangeSource.addEmptySink(); - RefCountingListener refs = new RefCountingListener(listener.map(unused -> new ComputeResponse(collectedProfiles))) - ) { - exchangeSink.addCompletionListener(refs.acquire()); - exchangeSource.addCompletionListener(refs.acquire()); + try (Releasable ignored = exchangeSource.addEmptySink()) { + exchangeSink.addCompletionListener(computeListener.acquireAvoid()); + exchangeSource.addCompletionListener(computeListener.acquireAvoid()); PhysicalPlan coordinatorPlan = new ExchangeSinkExec( plan.source(), plan.output(), @@ -861,13 +819,7 @@ void runComputeOnRemoteCluster( parentTask, new ComputeContext(localSessionId, clusterAlias, List.of(), configuration, exchangeSource, exchangeSink), coordinatorPlan, - cancelOnFailure(parentTask, cancelled, refs.acquire()).map(driverProfiles -> { - responseHeadersCollector.collect(); - if (configuration.profile()) { - collectedProfiles.addAll(driverProfiles); - } - return null; - }) + computeListener.acquireCompute() ); startComputeOnDataNodes( localSessionId, @@ -878,14 +830,7 @@ void runComputeOnRemoteCluster( concreteIndices, originalIndices, exchangeSource, - ActionListener.releaseAfter(refs.acquire(), exchangeSource.addEmptySink()), - () -> cancelOnFailure(parentTask, cancelled, refs.acquire()).map(r -> { - responseHeadersCollector.collect(); - if (configuration.profile()) { - collectedProfiles.addAll(r.getProfiles()); - } - return null; - }) + computeListener ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java new file mode 100644 index 0000000000000..c93f3b9e0e350 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/ComputeListenerTests.java @@ -0,0 +1,246 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.node.VersionInformation; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskCancellationService; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.Before; +import org.mockito.Mockito; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.elasticsearch.test.tasks.MockTaskManager.SPY_TASK_MANAGER_SETTING; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThan; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; + +public class ComputeListenerTests extends ESTestCase { + private ThreadPool threadPool; + private TransportService transportService; + + @Before + public void setUpTransportService() { + threadPool = new TestThreadPool(getTestName()); + transportService = MockTransportService.createNewService( + Settings.builder().put(SPY_TASK_MANAGER_SETTING.getKey(), true).build(), + VersionInformation.CURRENT, + TransportVersionUtils.randomVersion(), + threadPool + ); + transportService.start(); + TaskCancellationService cancellationService = new TaskCancellationService(transportService); + transportService.getTaskManager().setTaskCancellationService(cancellationService); + Mockito.clearInvocations(transportService.getTaskManager()); + } + + @After + public void shutdownTransportService() { + transportService.close(); + terminate(threadPool); + } + + private CancellableTask newTask() { + return new CancellableTask( + randomIntBetween(1, 100), + "test-type", + "test-action", + "test-description", + TaskId.EMPTY_TASK_ID, + Map.of() + ); + } + + private ComputeResponse randomResponse() { + int numProfiles = randomIntBetween(0, 2); + List profiles = new ArrayList<>(numProfiles); + for (int i = 0; i < numProfiles; i++) { + profiles.add(new DriverProfile(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), List.of())); + } + return new ComputeResponse(profiles); + } + + public void testEmpty() { + PlainActionFuture results = new PlainActionFuture<>(); + try (ComputeListener ignored = new ComputeListener(transportService, newTask(), results)) { + 
assertFalse(results.isDone()); + } + assertTrue(results.isDone()); + assertThat(results.actionGet(10, TimeUnit.SECONDS).getProfiles(), empty()); + } + + public void testCollectComputeResults() { + PlainActionFuture future = new PlainActionFuture<>(); + List allProfiles = new ArrayList<>(); + try (ComputeListener computeListener = new ComputeListener(transportService, newTask(), future)) { + int tasks = randomIntBetween(1, 100); + for (int t = 0; t < tasks; t++) { + if (randomBoolean()) { + ActionListener subListener = computeListener.acquireAvoid(); + threadPool.schedule( + ActionRunnable.wrap(subListener, l -> l.onResponse(null)), + TimeValue.timeValueNanos(between(0, 100)), + threadPool.generic() + ); + } else { + ComputeResponse resp = randomResponse(); + allProfiles.addAll(resp.getProfiles()); + ActionListener subListener = computeListener.acquireCompute(); + threadPool.schedule( + ActionRunnable.wrap(subListener, l -> l.onResponse(resp)), + TimeValue.timeValueNanos(between(0, 100)), + threadPool.generic() + ); + } + } + } + ComputeResponse result = future.actionGet(10, TimeUnit.SECONDS); + assertThat( + result.getProfiles().stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum)), + equalTo(allProfiles.stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum))) + ); + Mockito.verifyNoInteractions(transportService.getTaskManager()); + } + + public void testCancelOnFailure() throws Exception { + Queue rootCauseExceptions = ConcurrentCollections.newQueue(); + IntStream.range(0, between(1, 100)) + .forEach( + n -> rootCauseExceptions.add(new CircuitBreakingException("breaking exception " + n, CircuitBreaker.Durability.TRANSIENT)) + ); + int successTasks = between(1, 50); + int failedTasks = between(1, 100); + PlainActionFuture rootListener = new PlainActionFuture<>(); + CancellableTask rootTask = newTask(); + try (ComputeListener computeListener = new ComputeListener(transportService, rootTask, rootListener)) { + for (int i = 0; i < successTasks; i++) { + ActionListener subListener = computeListener.acquireCompute(); + threadPool.schedule( + ActionRunnable.wrap(subListener, l -> l.onResponse(randomResponse())), + TimeValue.timeValueNanos(between(0, 100)), + threadPool.generic() + ); + } + for (int i = 0; i < failedTasks; i++) { + ActionListener subListener = randomBoolean() ? 
computeListener.acquireAvoid() : computeListener.acquireCompute(); + threadPool.schedule(ActionRunnable.wrap(subListener, l -> { + Exception ex = rootCauseExceptions.poll(); + if (ex == null) { + ex = new TaskCancelledException("task was cancelled"); + } + l.onFailure(ex); + }), TimeValue.timeValueNanos(between(0, 100)), threadPool.generic()); + } + } + assertBusy(rootListener::isDone); + ExecutionException failure = expectThrows(ExecutionException.class, () -> rootListener.get(1, TimeUnit.SECONDS)); + Throwable cause = failure.getCause(); + assertNotNull(failure); + assertThat(cause, instanceOf(CircuitBreakingException.class)); + assertThat(failure.getSuppressed().length, lessThan(10)); + Mockito.verify(transportService.getTaskManager(), Mockito.times(1)) + .cancelTaskAndDescendants(eq(rootTask), eq("cancelled on failure"), eq(false), any()); + } + + public void testCollectWarnings() throws Exception { + List allProfiles = new ArrayList<>(); + Map> allWarnings = new HashMap<>(); + ActionListener rootListener = new ActionListener<>() { + @Override + public void onResponse(ComputeResponse result) { + assertThat( + result.getProfiles().stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum)), + equalTo(allProfiles.stream().collect(Collectors.toMap(p -> p, p -> 1, Integer::sum))) + ); + Map> responseHeaders = threadPool.getThreadContext() + .getResponseHeaders() + .entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> new HashSet<>(e.getValue()))); + assertThat(responseHeaders, equalTo(allWarnings)); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + CountDownLatch latch = new CountDownLatch(1); + try ( + ComputeListener computeListener = new ComputeListener( + transportService, + newTask(), + ActionListener.runAfter(rootListener, latch::countDown) + ) + ) { + int tasks = randomIntBetween(1, 100); + for (int t = 0; t < tasks; t++) { + if (randomBoolean()) { + ActionListener subListener = computeListener.acquireAvoid(); + threadPool.schedule( + ActionRunnable.wrap(subListener, l -> l.onResponse(null)), + TimeValue.timeValueNanos(between(0, 100)), + threadPool.generic() + ); + } else { + ComputeResponse resp = randomResponse(); + allProfiles.addAll(resp.getProfiles()); + int numWarnings = randomIntBetween(1, 5); + Map warnings = new HashMap<>(); + for (int i = 0; i < numWarnings; i++) { + warnings.put("key" + between(1, 10), "value" + between(1, 10)); + } + for (Map.Entry e : warnings.entrySet()) { + allWarnings.computeIfAbsent(e.getKey(), v -> new HashSet<>()).add(e.getValue()); + } + ActionListener subListener = computeListener.acquireCompute(); + threadPool.schedule(ActionRunnable.wrap(subListener, l -> { + for (Map.Entry e : warnings.entrySet()) { + threadPool.getThreadContext().addResponseHeader(e.getKey(), e.getValue()); + } + l.onResponse(resp); + }), TimeValue.timeValueNanos(between(0, 100)), threadPool.generic()); + } + } + } + assertTrue(latch.await(10, TimeUnit.SECONDS)); + Mockito.verifyNoInteractions(transportService.getTaskManager()); + } +} From eeedb356fd57025300aa91893b906963b2b3ea94 Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Fri, 5 Jul 2024 14:36:32 -0600 Subject: [PATCH 212/216] Deprecate using slm privileges to access ilm (#110540) Currently, read_slm privilege grants access to get the ILM status, and manage_slm grants access to start/stop ILM. This access will be removed in the future, but needs to be deprecated before removal. Add deprecation warning to the read_slm and manage_slm docs. 
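To make the migration concrete, here is a minimal sketch of a role definition that grants ILM read access explicitly instead of relying on the deprecated `read_slm` behavior. The `read_slm`/`read_ilm` privilege names are the real ones from this change, but the `RoleDescriptor` constructor overload shown is an assumption and may not match the actual API:

```
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;

final class SlmIlmRoleExample {
    // Grant read_ilm explicitly alongside read_slm: after this deprecation,
    // read_slm alone will no longer give access to the ILM status API.
    static RoleDescriptor slmReaderRole() {
        return new RoleDescriptor(
            "slm_reader",
            new String[] { "read_slm", "read_ilm" }, // cluster privileges
            null, // indices privileges (none)
            null // run-as (none)
        );
    }
}
```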
--- docs/changelog/110540.yaml | 16 ++++++++++++++++ .../security/authorization/privileges.asciidoc | 10 +++++++++- 2 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/110540.yaml diff --git a/docs/changelog/110540.yaml b/docs/changelog/110540.yaml new file mode 100644 index 0000000000000..5e4994da80704 --- /dev/null +++ b/docs/changelog/110540.yaml @@ -0,0 +1,16 @@ +pr: 110540 +summary: Deprecate using slm privileges to access ilm +area: ILM+SLM +type: deprecation +issues: [] +deprecation: + title: Deprecate using slm privileges to access ilm + area: REST API + details: The `read_slm` privilege can get the ILM status, and + the `manage_slm` privilege can start and stop ILM. Access to these + APIs should be granted using the `read_ilm` and `manage_ilm` privileges + instead. Access to ILM APIs will be removed from SLM privileges in + a future major release, and is now deprecated. + impact: Users that need access to the ILM status API should now + use the `read_ilm` privilege. Users that need to start and stop ILM, + should use the `manage_ilm` privilege. diff --git a/docs/reference/security/authorization/privileges.asciidoc b/docs/reference/security/authorization/privileges.asciidoc index cc44c97a08129..44897baa8cb4a 100644 --- a/docs/reference/security/authorization/privileges.asciidoc +++ b/docs/reference/security/authorization/privileges.asciidoc @@ -2,7 +2,7 @@ === Security privileges :frontmatter-description: A list of privileges that can be assigned to user roles. :frontmatter-tags-products: [elasticsearch] -:frontmatter-tags-content-type: [reference] +:frontmatter-tags-content-type: [reference] :frontmatter-tags-user-goals: [secure] This section lists the privileges that you can assign to a role. @@ -198,6 +198,10 @@ All {slm} ({slm-init}) actions, including creating and updating policies and starting and stopping {slm-init}. + This privilege is not available in {serverless-full}. ++ +deprecated:[8.15] Also grants the permission to start and stop {Ilm}, using +the {ref}/ilm-start.html[ILM start] and {ref}/ilm-stop.html[ILM stop] APIs. +In a future major release, this privilege will not grant any {Ilm} permissions. `manage_token`:: All security-related operations on tokens that are generated by the {es} Token @@ -285,6 +289,10 @@ All read-only {slm-init} actions, such as getting policies and checking the {slm-init} status. + This privilege is not available in {serverless-full}. ++ +deprecated:[8.15] Also grants the permission to get the {Ilm} status, using +the {ref}/ilm-get-status.html[ILM get status API]. In a future major release, +this privilege will not grant any {Ilm} permissions. 
`read_security`:: All read-only security-related operations, such as getting users, user profiles, From 27b177938f9e78fa341a523bc8ff333e04939222 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 5 Jul 2024 14:32:21 -0700 Subject: [PATCH 213/216] Update JDK23 to build 24 (#110549) --- .../internal/toolchain/OracleOpenJdkToolchainResolver.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java index d0c7e9316d996..ec86798e653f1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java @@ -88,7 +88,7 @@ public String url(String os, String arch, String extension) { List builds = List.of( getBundledJdkBuild(), // 23 early access - new EarlyAccessJdkBuild(JavaLanguageVersion.of(23), "23", "23") + new EarlyAccessJdkBuild(JavaLanguageVersion.of(23), "23", "24") ); private JdkBuild getBundledJdkBuild() { From c7ee39a58d2fb0756405d840bc132342ff2517a0 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sun, 7 Jul 2024 08:11:36 -0700 Subject: [PATCH 214/216] Adjust cancellation message in task tests (#110546) Adding `parent task was cancelled [test cancel]` to the list of allowed cancellation messages. --- .../org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 9778756176574..cde4f10ef556c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -59,6 +59,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -325,7 +326,7 @@ private void assertCancelled(ActionFuture response) throws Ex */ assertThat( cancelException.getMessage(), - either(equalTo("test cancel")).or(equalTo("task cancelled")).or(equalTo("request cancelled test cancel")) + in(List.of("test cancel", "task cancelled", "request cancelled test cancel", "parent task was cancelled [test cancel]")) ); assertBusy( () -> assertThat( From f87c81d509eab8e35e54ba5e837eb3d732efc1e7 Mon Sep 17 00:00:00 2001 From: Aditya Kukankar Date: Mon, 8 Jul 2024 03:36:49 +0200 Subject: [PATCH 215/216] Correct transport CA name in security autoconfig (#106520) Updates the name of the transport CA in security autoconfiguration. Previously both the HTTP and Transport CAs had the same name (`CN=Elasticsearch security auto-configuration HTTP CA`). The transport CA now has a different name (`CN=Elasticsearch security auto-configuration transport CA`). 
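As a quick way to see the effect of this change, the issuer names of the two autoconfigured CAs can be compared directly. A minimal standalone sketch, assuming hypothetical keystore paths and password handling (the `http` and `transport` keystore aliases match the ones used by the autoconfiguration code in this patch):

```
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import java.security.cert.X509Certificate;

public class CheckAutoConfigCaNames {
    public static void main(String[] args) throws Exception {
        // Hypothetical paths and password; real deployments read these from the node's secure settings.
        X509Certificate http = loadCertificate(Path.of("config/certs/http.p12"), "changeme", "http");
        X509Certificate transport = loadCertificate(Path.of("config/certs/transport.p12"), "changeme", "transport");
        // With this fix the two issuer DNs differ: "...HTTP CA" vs "...transport CA".
        System.out.println("HTTP CA issuer:      " + http.getIssuerX500Principal().getName());
        System.out.println("Transport CA issuer: " + transport.getIssuerX500Principal().getName());
    }

    private static X509Certificate loadCertificate(Path path, String password, String alias) throws Exception {
        KeyStore keyStore = KeyStore.getInstance("PKCS12");
        try (InputStream in = Files.newInputStream(path)) {
            keyStore.load(in, password.toCharArray());
        }
        return (X509Certificate) keyStore.getCertificate(alias);
    }
}
```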
Closes: #106455 Co-authored-by: Aditya Kukankar Co-authored-by: Tim Vernum --- docs/changelog/106520.yaml | 6 +++ .../xpack/security/cli/AutoConfigureNode.java | 10 +++-- .../security/cli/AutoConfigureNodeTests.java | 45 ++++++++++++++++++- 3 files changed, 56 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/106520.yaml diff --git a/docs/changelog/106520.yaml b/docs/changelog/106520.yaml new file mode 100644 index 0000000000000..c3fe69a4c3dbd --- /dev/null +++ b/docs/changelog/106520.yaml @@ -0,0 +1,6 @@ +pr: 106520 +summary: Updated the transport CA name in Security Auto-Configuration. +area: Security +type: bug +issues: + - 106455 diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java index 29828fba085d8..3994fb50c7fc6 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/AutoConfigureNode.java @@ -114,7 +114,8 @@ */ public class AutoConfigureNode extends EnvironmentAwareCommand { - public static final String AUTO_CONFIG_ALT_DN = "CN=Elasticsearch security auto-configuration HTTP CA"; + public static final String AUTO_CONFIG_HTTP_ALT_DN = "CN=Elasticsearch security auto-configuration HTTP CA"; + public static final String AUTO_CONFIG_TRANSPORT_ALT_DN = "CN=Elasticsearch security auto-configuration transport CA"; // the transport keystore is also used as a truststore private static final String SIGNATURE_ALGORITHM = "SHA256withRSA"; private static final String TRANSPORT_AUTOGENERATED_KEYSTORE_NAME = "transport"; @@ -272,7 +273,8 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce final List transportAddresses; final String cnValue = NODE_NAME_SETTING.exists(env.settings()) ? 
NODE_NAME_SETTING.get(env.settings()) : System.getenv("HOSTNAME"); final X500Principal certificatePrincipal = new X500Principal("CN=" + cnValue); - final X500Principal caPrincipal = new X500Principal(AUTO_CONFIG_ALT_DN); + final X500Principal httpCaPrincipal = new X500Principal(AUTO_CONFIG_HTTP_ALT_DN); + final X500Principal transportCaPrincipal = new X500Principal(AUTO_CONFIG_TRANSPORT_ALT_DN); if (inEnrollmentMode) { // this is an enrolling node, get HTTP CA key/certificate and transport layer key/certificate from another node @@ -402,7 +404,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce final KeyPair transportCaKeyPair = CertGenUtils.generateKeyPair(TRANSPORT_CA_KEY_SIZE); final PrivateKey transportCaKey = transportCaKeyPair.getPrivate(); transportCaCert = CertGenUtils.generateSignedCertificate( - caPrincipal, + transportCaPrincipal, null, transportCaKeyPair, null, @@ -429,7 +431,7 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce httpCaKey = httpCaKeyPair.getPrivate(); // self-signed CA httpCaCert = CertGenUtils.generateSignedCertificate( - caPrincipal, + httpCaPrincipal, null, httpCaKeyPair, null, diff --git a/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java index d1dbe9d037756..129d85d0818b2 100644 --- a/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/AutoConfigureNodeTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.KeyStoreUtil; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Tuple; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.http.HttpTransportSettings; @@ -32,6 +33,8 @@ import java.util.List; import static java.nio.file.StandardOpenOption.CREATE_NEW; +import static org.elasticsearch.xpack.security.cli.AutoConfigureNode.AUTO_CONFIG_HTTP_ALT_DN; +import static org.elasticsearch.xpack.security.cli.AutoConfigureNode.AUTO_CONFIG_TRANSPORT_ALT_DN; import static org.elasticsearch.xpack.security.cli.AutoConfigureNode.anyRemoteHostNodeAddress; import static org.elasticsearch.xpack.security.cli.AutoConfigureNode.removePreviousAutoconfiguration; import static org.hamcrest.Matchers.equalTo; @@ -131,6 +134,21 @@ public void testRemovePreviousAutoconfigurationRetainsUserAdded() throws Excepti assertEquals(file1, removePreviousAutoconfiguration(file2)); } + public void testSubjectAndIssuerForGeneratedCertificates() throws Exception { + // test no publish settings + Path tempDir = createTempDir(); + try { + Files.createDirectory(tempDir.resolve("config")); + // empty yml file, it just has to exist + Files.write(tempDir.resolve("config").resolve("elasticsearch.yml"), List.of(), CREATE_NEW); + Tuple generatedCerts = runAutoConfigAndReturnCertificates(tempDir, Settings.EMPTY); + assertThat(checkSubjectAndIssuerDN(generatedCerts.v1(), "CN=dummy.test.hostname", AUTO_CONFIG_HTTP_ALT_DN), is(true)); + assertThat(checkSubjectAndIssuerDN(generatedCerts.v2(), "CN=dummy.test.hostname", AUTO_CONFIG_TRANSPORT_ALT_DN), is(true)); + } finally { + deleteDirectory(tempDir); + } + } + public void testGeneratedHTTPCertificateSANs() throws Exception { // test no publish settings Path 
tempDir = createTempDir(); @@ -262,6 +280,14 @@ private boolean checkGeneralNameSan(X509Certificate certificate, String generalN return false; } + private boolean checkSubjectAndIssuerDN(X509Certificate certificate, String subjectName, String issuerName) throws Exception { + if (certificate.getSubjectX500Principal().getName().equals(subjectName) + && certificate.getIssuerX500Principal().getName().equals(issuerName)) { + return true; + } + return false; + } + private void verifyExtendedKeyUsage(X509Certificate httpCertificate) throws Exception { List extendedKeyUsage = httpCertificate.getExtendedKeyUsage(); assertEquals("Only one extended key usage expected for HTTP certificate.", 1, extendedKeyUsage.size()); @@ -270,6 +296,11 @@ private void verifyExtendedKeyUsage(X509Certificate httpCertificate) throws Exce } private X509Certificate runAutoConfigAndReturnHTTPCertificate(Path configDir, Settings settings) throws Exception { + Tuple generatedCertificates = runAutoConfigAndReturnCertificates(configDir, settings); + return generatedCertificates.v1(); + } + + private Tuple runAutoConfigAndReturnCertificates(Path configDir, Settings settings) throws Exception { final Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", configDir).put(settings).build()); // runs the command to auto-generate the config files and the keystore new AutoConfigureNode(false).execute(MockTerminal.create(), new OptionParser().parse(), env, null); @@ -278,16 +309,28 @@ private X509Certificate runAutoConfigAndReturnHTTPCertificate(Path configDir, Se nodeKeystore.decrypt(new char[0]); // the keystore is always bootstrapped with an empty password SecureString httpKeystorePassword = nodeKeystore.getString("xpack.security.http.ssl.keystore.secure_password"); + SecureString transportKeystorePassword = nodeKeystore.getString("xpack.security.transport.ssl.keystore.secure_password"); final Settings newSettings = Settings.builder().loadFromPath(env.configFile().resolve("elasticsearch.yml")).build(); final String httpKeystorePath = newSettings.get("xpack.security.http.ssl.keystore.path"); + final String transportKeystorePath = newSettings.get("xpack.security.transport.ssl.keystore.path"); KeyStore httpKeystore = KeyStoreUtil.readKeyStore( configDir.resolve("config").resolve(httpKeystorePath), "PKCS12", httpKeystorePassword.getChars() ); - return (X509Certificate) httpKeystore.getCertificate("http"); + + KeyStore transportKeystore = KeyStoreUtil.readKeyStore( + configDir.resolve("config").resolve(transportKeystorePath), + "PKCS12", + transportKeystorePassword.getChars() + ); + + X509Certificate httpCertificate = (X509Certificate) httpKeystore.getCertificate("http"); + X509Certificate transportCertificate = (X509Certificate) transportKeystore.getCertificate("transport"); + + return new Tuple<>(httpCertificate, transportCertificate); } private void deleteDirectory(Path directory) throws IOException { From 9b8cd3d5392abadce3a420b935095e763be8484b Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 8 Jul 2024 07:04:21 +0200 Subject: [PATCH 216/216] Introduce utility for concurrent execution of arbitrary Runnable in tests (#110552) We have the same pattern in a bunch of places, I dried up a few here. We want to run N tasks so we create N threads in a loop, start them and join them right away. The node starting logic refactored here is essentially the same since the threads have idle lifetime 0. This can be dried up a little and made more efficient. 
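For illustration, a minimal sketch of the kind of helper this change adds. The shape is an assumption about the new `runInParallel` utility in `ESTestCase`, not its actual implementation, and it already bakes in the `N-1` optimization described in the next paragraph:

```
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.function.IntConsumer;

final class ConcurrentTestUtil {
    // Runs `tasks` jobs (tasks >= 1) concurrently: tasks - 1 of them on fresh
    // threads, the last one on the calling thread, then joins everything and
    // rethrows the first task failure.
    static void runInParallel(int tasks, IntConsumer task) throws InterruptedException, ExecutionException {
        List<FutureTask<Void>> futures = new ArrayList<>(tasks);
        for (int i = 0; i < tasks; i++) {
            final int taskId = i;
            futures.add(new FutureTask<>(() -> task.accept(taskId), null));
        }
        Thread[] threads = new Thread[tasks - 1];
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(futures.get(i));
            threads[i].start();
        }
        futures.get(tasks - 1).run(); // reuse the calling thread for the last task
        for (Thread thread : threads) {
            thread.join();
        }
        for (FutureTask<Void> future : futures) {
            future.get(); // propagates a task failure as ExecutionException
        }
    }
}
```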
Might as well always use `N-1` tasks and run one of them on the calling thread. This saves quite a few threads when running tests and speeds things up a little, especially when running many concurrent Gradle workers and CPU is at 100% already (mostly coming from the speedup on starting nodes this brings and the reduction in test thread sleeps). No functional changes to the tests otherwise, except for some replacing of `CountDownLatch` with `CyclicBarrier` to make things work with the new API. --- .../admin/indices/rollover/RolloverIT.java | 36 +++--- .../action/bulk/BulkWithUpdatesIT.java | 41 +++--- .../elasticsearch/blocks/SimpleBlocksIT.java | 38 +++--- .../index/engine/MaxDocsLimitIT.java | 33 ++--- .../index/mapper/DynamicMappingIT.java | 36 ++---- .../index/seqno/GlobalCheckpointSyncIT.java | 46 ++----- .../indices/state/CloseIndexIT.java | 117 ++++++------------ .../state/CloseWhileRelocatingShardsIT.java | 42 +++---- .../org/elasticsearch/test/ESTestCase.java | 40 ++++++ .../test/InternalTestCluster.java | 28 +---- 10 files changed, 179 insertions(+), 278 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 48f1ecb072314..4d52383bfc4e1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -832,30 +832,22 @@ public void testRolloverConcurrently() throws Exception { assertAcked(client().execute(TransportPutComposableIndexTemplateAction.TYPE, putTemplateRequest).actionGet()); final CyclicBarrier barrier = new CyclicBarrier(numOfThreads); - final Thread[] threads = new Thread[numOfThreads]; - for (int i = 0; i < numOfThreads; i++) { + runInParallel(numOfThreads, i -> { var aliasName = "test-" + i; - threads[i] = new Thread(() -> { - assertAcked(prepareCreate(aliasName + "-000001").addAlias(new Alias(aliasName).writeIndex(true)).get()); - for (int j = 1; j <= numberOfRolloversPerThread; j++) { - try { - barrier.await(); - } catch (Exception e) { - throw new RuntimeException(e); - } - var response = indicesAdmin().prepareRolloverIndex(aliasName).waitForActiveShards(ActiveShardCount.NONE).get(); - assertThat(response.getOldIndex(), equalTo(aliasName + Strings.format("-%06d", j))); - assertThat(response.getNewIndex(), equalTo(aliasName + Strings.format("-%06d", j + 1))); - assertThat(response.isDryRun(), equalTo(false)); - assertThat(response.isRolledOver(), equalTo(true)); + assertAcked(prepareCreate(aliasName + "-000001").addAlias(new Alias(aliasName).writeIndex(true)).get()); + for (int j = 1; j <= numberOfRolloversPerThread; j++) { + try { + barrier.await(); + } catch (Exception e) { + throw new RuntimeException(e); } - }); - threads[i].start(); - } - - for (Thread thread : threads) { - thread.join(); - } + var response = indicesAdmin().prepareRolloverIndex(aliasName).waitForActiveShards(ActiveShardCount.NONE).get(); + assertThat(response.getOldIndex(), equalTo(aliasName + Strings.format("-%06d", j))); + assertThat(response.getNewIndex(), equalTo(aliasName + Strings.format("-%06d", j + 1))); + assertThat(response.isDryRun(), equalTo(false)); + assertThat(response.isRolledOver(), equalTo(true)); + } + }); for (int i = 0; i < numOfThreads; i++) { var aliasName = "test-" + i; diff --git
a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index 00bd6ee7ee891..cfdf667f6c02e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -519,33 +519,22 @@ public void testFailingVersionedUpdatedOnBulk() throws Exception { indexDoc("test", "1", "field", "1"); final BulkResponse[] responses = new BulkResponse[30]; final CyclicBarrier cyclicBarrier = new CyclicBarrier(responses.length); - Thread[] threads = new Thread[responses.length]; - - for (int i = 0; i < responses.length; i++) { - final int threadID = i; - threads[threadID] = new Thread(() -> { - try { - cyclicBarrier.await(); - } catch (Exception e) { - return; - } - BulkRequestBuilder requestBuilder = client().prepareBulk(); - requestBuilder.add( - client().prepareUpdate("test", "1") - .setIfSeqNo(0L) - .setIfPrimaryTerm(1) - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", threadID) - ); - responses[threadID] = requestBuilder.get(); - }); - threads[threadID].start(); - - } - - for (int i = 0; i < threads.length; i++) { - threads[i].join(); - } + runInParallel(responses.length, threadID -> { + try { + cyclicBarrier.await(); + } catch (Exception e) { + return; + } + BulkRequestBuilder requestBuilder = client().prepareBulk(); + requestBuilder.add( + client().prepareUpdate("test", "1") + .setIfSeqNo(0L) + .setIfPrimaryTerm(1) + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", threadID) + ); + responses[threadID] = requestBuilder.get(); + }); int successes = 0; for (BulkResponse response : responses) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java index 136db24767d22..1cc771ab72c09 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java @@ -32,6 +32,8 @@ import java.util.List; import java.util.Locale; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutionException; import java.util.function.Consumer; import java.util.stream.IntStream; @@ -310,7 +312,7 @@ public void testAddBlockToUnassignedIndex() throws Exception { } } - public void testConcurrentAddBlock() throws InterruptedException { + public void testConcurrentAddBlock() throws InterruptedException, ExecutionException { final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createIndex(indexName); @@ -322,31 +324,21 @@ public void testConcurrentAddBlock() throws InterruptedException { IntStream.range(0, nbDocs).mapToObj(i -> prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)).collect(toList()) ); ensureYellowAndNoInitializingShards(indexName); - - final CountDownLatch startClosing = new CountDownLatch(1); - final Thread[] threads = new Thread[randomIntBetween(2, 5)]; - final APIBlock block = randomAddableBlock(); + final int threadCount = randomIntBetween(2, 5); + final CyclicBarrier barrier = new CyclicBarrier(threadCount); try { - for (int i = 0; i < threads.length; i++) { - threads[i] = new Thread(() -> { - safeAwait(startClosing); - try { - indicesAdmin().prepareAddBlock(block, indexName).get(); - assertIndexHasBlock(block, indexName); - } catch 
(final ClusterBlockException e) { - assertThat(e.blocks(), hasSize(1)); - assertTrue(e.blocks().stream().allMatch(b -> b.id() == block.getBlock().id())); - } - }); - threads[i].start(); - } - - startClosing.countDown(); - for (Thread thread : threads) { - thread.join(); - } + runInParallel(threadCount, i -> { + safeAwait(barrier); + try { + indicesAdmin().prepareAddBlock(block, indexName).get(); + assertIndexHasBlock(block, indexName); + } catch (final ClusterBlockException e) { + assertThat(e.blocks(), hasSize(1)); + assertTrue(e.blocks().stream().allMatch(b -> b.id() == block.getBlock().id())); + } + }); assertIndexHasBlock(block, indexName); } finally { disableIndexBlock(indexName, block); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index acfc38ca12f89..409a57b35ac4b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -155,27 +155,20 @@ static IndexingResult indexDocs(int numRequests, int numThreads) throws Exceptio final AtomicInteger completedRequests = new AtomicInteger(); final AtomicInteger numSuccess = new AtomicInteger(); final AtomicInteger numFailure = new AtomicInteger(); - Thread[] indexers = new Thread[numThreads]; - Phaser phaser = new Phaser(indexers.length); - for (int i = 0; i < indexers.length; i++) { - indexers[i] = new Thread(() -> { - phaser.arriveAndAwaitAdvance(); - while (completedRequests.incrementAndGet() <= numRequests) { - try { - final DocWriteResponse resp = prepareIndex("test").setSource("{}", XContentType.JSON).get(); - numSuccess.incrementAndGet(); - assertThat(resp.status(), equalTo(RestStatus.CREATED)); - } catch (IllegalArgumentException e) { - numFailure.incrementAndGet(); - assertThat(e.getMessage(), containsString("Number of documents in the index can't exceed [" + maxDocs.get() + "]")); - } + Phaser phaser = new Phaser(numThreads); + runInParallel(numThreads, i -> { + phaser.arriveAndAwaitAdvance(); + while (completedRequests.incrementAndGet() <= numRequests) { + try { + final DocWriteResponse resp = prepareIndex("test").setSource("{}", XContentType.JSON).get(); + numSuccess.incrementAndGet(); + assertThat(resp.status(), equalTo(RestStatus.CREATED)); + } catch (IllegalArgumentException e) { + numFailure.incrementAndGet(); + assertThat(e.getMessage(), containsString("Number of documents in the index can't exceed [" + maxDocs.get() + "]")); } - }); - indexers[i].start(); - } - for (Thread indexer : indexers) { - indexer.join(); - } + } + }); internalCluster().assertNoInFlightDocsInEngine(); return new IndexingResult(numSuccess.get(), numFailure.get()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index 76d305ce8ea4b..463ac49d60e47 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -46,6 +46,7 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -161,31 +162,20 @@ public void 
testConcurrentDynamicIgnoreBeyondLimitUpdates() throws Throwable { private Map<String, Object> indexConcurrently(int numberOfFieldsToCreate, Settings.Builder settings) throws Throwable { indicesAdmin().prepareCreate("index").setSettings(settings).get(); ensureGreen("index"); - final Thread[] indexThreads = new Thread[numberOfFieldsToCreate]; - final CountDownLatch startLatch = new CountDownLatch(1); + final CyclicBarrier barrier = new CyclicBarrier(numberOfFieldsToCreate); final AtomicReference<Throwable> error = new AtomicReference<>(); - for (int i = 0; i < indexThreads.length; ++i) { + runInParallel(numberOfFieldsToCreate, i -> { final String id = Integer.toString(i); - indexThreads[i] = new Thread(new Runnable() { - @Override - public void run() { - try { - startLatch.await(); - assertEquals( - DocWriteResponse.Result.CREATED, - prepareIndex("index").setId(id).setSource("field" + id, "bar").get().getResult() - ); - } catch (Exception e) { - error.compareAndSet(null, e); - } - } - }); - indexThreads[i].start(); - } - startLatch.countDown(); - for (Thread thread : indexThreads) { - thread.join(); - } + try { + barrier.await(); + assertEquals( + DocWriteResponse.Result.CREATED, + prepareIndex("index").setId(id).setSource("field" + id, "bar").get().getResult() + ); + } catch (Exception e) { + error.compareAndSet(null, e); + } + }); if (error.get() != null) { throw error.get(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java index c60b6bb72e8ed..6a7c7bcf9d9bf 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java @@ -25,9 +25,7 @@ import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.xcontent.XContentType; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; @@ -143,37 +141,20 @@ private void runGlobalCheckpointSyncTest( final int numberOfDocuments = randomIntBetween(0, 256); final int numberOfThreads = randomIntBetween(1, 4); - final CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); + final CyclicBarrier barrier = new CyclicBarrier(numberOfThreads); // start concurrent indexing threads - final List<Thread> threads = new ArrayList<>(numberOfThreads); - for (int i = 0; i < numberOfThreads; i++) { - final int index = i; - final Thread thread = new Thread(() -> { - try { - barrier.await(); - } catch (BrokenBarrierException | InterruptedException e) { - throw new RuntimeException(e); - } - for (int j = 0; j < numberOfDocuments; j++) { - final String id = Integer.toString(index * numberOfDocuments + j); - prepareIndex("test").setId(id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); - } - try { - barrier.await(); - } catch (BrokenBarrierException | InterruptedException e) { - throw new RuntimeException(e); - } - }); - threads.add(thread); - thread.start(); - } - - // synchronize the start of the threads - barrier.await(); - - // wait for the threads to finish - barrier.await(); + runInParallel(numberOfThreads, index -> { + try { + barrier.await(); + } catch (BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + for (int j = 0; j < numberOfDocuments; j++) { + final String id
= Integer.toString(index * numberOfDocuments + j); + prepareIndex("test").setId(id).setSource("{\"foo\": " + id + "}", XContentType.JSON).get(); + } + }); afterIndexing.accept(client()); @@ -203,9 +184,6 @@ private void runGlobalCheckpointSyncTest( } }, 60, TimeUnit.SECONDS); ensureGreen("test"); - for (final Thread thread : threads) { - thread.join(); - } } public void testPersistGlobalCheckpoint() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java index 77cdc2e99977d..1751ffd7f1cfb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java @@ -38,12 +38,12 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Set; -import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -170,7 +170,7 @@ public void testCloseUnassignedIndex() throws Exception { assertIndexIsClosed(indexName); } - public void testConcurrentClose() throws InterruptedException { + public void testConcurrentClose() throws InterruptedException, ExecutionException { final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createIndex(indexName); @@ -196,25 +196,16 @@ public void testConcurrentClose() throws InterruptedException { assertThat(healthResponse.isTimedOut(), equalTo(false)); assertThat(healthResponse.getIndices().get(indexName).getStatus().value(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW.value())); - final CountDownLatch startClosing = new CountDownLatch(1); - final Thread[] threads = new Thread[randomIntBetween(2, 5)]; - - for (int i = 0; i < threads.length; i++) { - threads[i] = new Thread(() -> { - safeAwait(startClosing); - try { - indicesAdmin().prepareClose(indexName).get(); - } catch (final Exception e) { - assertException(e, indexName); - } - }); - threads[i].start(); - } - - startClosing.countDown(); - for (Thread thread : threads) { - thread.join(); - } + final int tasks = randomIntBetween(2, 5); + final CyclicBarrier barrier = new CyclicBarrier(tasks); + runInParallel(tasks, i -> { + safeAwait(barrier); + try { + indicesAdmin().prepareClose(indexName).get(); + } catch (final Exception e) { + assertException(e, indexName); + } + }); assertIndexIsClosed(indexName); } @@ -256,37 +247,20 @@ public void testCloseWhileDeletingIndices() throws Exception { } assertThat(clusterAdmin().prepareState().get().getState().metadata().indices().size(), equalTo(indices.length)); - final List<Thread> threads = new ArrayList<>(); - final CountDownLatch latch = new CountDownLatch(1); - - for (final String indexToDelete : indices) { - threads.add(new Thread(() -> { - safeAwait(latch); - try { - assertAcked(indicesAdmin().prepareDelete(indexToDelete)); - } catch (final Exception e) { - assertException(e, indexToDelete); - } - })); - } - for (final String indexToClose : indices) { - threads.add(new Thread(() -> { - safeAwait(latch); - try { - indicesAdmin().prepareClose(indexToClose).get(); - } catch (final Exception e) { - assertException(e, indexToClose); + final CyclicBarrier barrier = new
CyclicBarrier(indices.length * 2); + runInParallel(indices.length * 2, i -> { + safeAwait(barrier); + final String index = indices[i % indices.length]; + try { + if (i < indices.length) { + assertAcked(indicesAdmin().prepareDelete(index)); + } else { + indicesAdmin().prepareClose(index).get(); } - })); - } - - for (Thread thread : threads) { - thread.start(); - } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + } catch (final Exception e) { + assertException(e, index); + } + }); } public void testConcurrentClosesAndOpens() throws Exception { @@ -297,37 +271,22 @@ public void testConcurrentClosesAndOpens() throws Exception { indexer.setFailureAssertion(e -> {}); waitForDocs(1, indexer); - final CountDownLatch latch = new CountDownLatch(1); + final int closes = randomIntBetween(1, 3); + final int opens = randomIntBetween(1, 3); + final CyclicBarrier barrier = new CyclicBarrier(opens + closes); - final List<Thread> threads = new ArrayList<>(); - for (int i = 0; i < randomIntBetween(1, 3); i++) { - threads.add(new Thread(() -> { - try { - safeAwait(latch); + runInParallel(opens + closes, i -> { + try { + safeAwait(barrier); + if (i < closes) { indicesAdmin().prepareClose(indexName).get(); - } catch (final Exception e) { - throw new AssertionError(e); - } - })); - } - for (int i = 0; i < randomIntBetween(1, 3); i++) { - threads.add(new Thread(() -> { - try { - safeAwait(latch); + } else { assertAcked(indicesAdmin().prepareOpen(indexName).get()); - } catch (final Exception e) { - throw new AssertionError(e); } - })); - } - - for (Thread thread : threads) { - thread.start(); - } - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + } catch (final Exception e) { + throw new AssertionError(e); + } + }); indexer.stopAndAwaitStopped(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index b160834d675d9..9eb69c87a52e8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -35,10 +35,10 @@ import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -187,30 +187,22 @@ public void testCloseWhileRelocatingShards() throws Exception { ClusterRerouteUtils.reroute(client(), commands.toArray(AllocationCommand[]::new)); // start index closing threads - final List<Thread> threads = new ArrayList<>(); - for (final String indexToClose : indices) { - final Thread thread = new Thread(() -> { - try { - safeAwait(latch); - } finally { - release.countDown(); - } - // Closing is not always acknowledged when shards are relocating: this is the case when the target shard is initializing - // or is catching up operations. In these cases the TransportVerifyShardBeforeCloseAction will detect that the global - // and max sequence number don't match and will not ack the close.
- AcknowledgedResponse closeResponse = indicesAdmin().prepareClose(indexToClose).get(); - if (closeResponse.isAcknowledged()) { - assertTrue("Index closing should not be acknowledged twice", acknowledgedCloses.add(indexToClose)); - } - }); - threads.add(thread); - thread.start(); - } - - latch.countDown(); - for (Thread thread : threads) { - thread.join(); - } + final CyclicBarrier barrier = new CyclicBarrier(indices.length); + runInParallel(indices.length, i -> { + try { + safeAwait(barrier); + } finally { + release.countDown(); + } + // Closing is not always acknowledged when shards are relocating: this is the case when the target shard is initializing + // or is catching up operations. In these cases the TransportVerifyShardBeforeCloseAction will detect that the global + // and max sequence number don't match and will not ack the close. + final String indexToClose = indices[i]; + AcknowledgedResponse closeResponse = indicesAdmin().prepareClose(indexToClose).get(); + if (closeResponse.isAcknowledged()) { + assertTrue("Index closing should not be acknowledged twice", acknowledgedCloses.add(indexToClose)); + } + }); // stop indexers first without waiting for stop to not redundantly index on some while waiting for another one to stop for (BackgroundIndexer indexer : indexers.values()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index b8860690fffc4..68fc6b41e0be0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -38,6 +38,7 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.tests.util.TimeUnits; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.RequestBuilder; @@ -179,12 +180,14 @@ import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; +import java.util.concurrent.FutureTask; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BooleanSupplier; import java.util.function.Consumer; +import java.util.function.IntConsumer; import java.util.function.IntFunction; import java.util.function.Predicate; import java.util.function.Supplier; @@ -2430,4 +2433,41 @@ public static T expectThrows(Class expectedType, Reques () -> builder.get().decRef() // dec ref if we unexpectedly fail to not leak transport response ); } + + /** + * Run {@code numberOfTasks} parallel tasks created by the given {@code taskFactory}. One of the tasks will be run on the + * calling thread, the rest will be run on new threads.
+ * @param numberOfTasks number of tasks to run in parallel + * @param taskFactory factory that creates the task to run for a given task index + */ + public static void runInParallel(int numberOfTasks, IntConsumer taskFactory) throws InterruptedException { + final ArrayList<Future<?>> futures = new ArrayList<>(numberOfTasks); + final Thread[] threads = new Thread[numberOfTasks - 1]; + for (int i = 0; i < numberOfTasks; i++) { + final int index = i; + var future = new FutureTask<Void>(() -> taskFactory.accept(index), null); + futures.add(future); + if (i == numberOfTasks - 1) { + future.run(); + } else { + threads[i] = new Thread(future); + threads[i].setName("runInParallel-T#" + i); + threads[i].start(); + } + } + for (Thread thread : threads) { + thread.join(); + } + Exception e = null; + for (Future<?> future : futures) { + try { + future.get(); + } catch (Exception ex) { + e = ExceptionsHelper.useOrSuppress(e, ex); + } + } + if (e != null) { + throw new AssertionError(e); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index bb78c43fca449..af37fb6feefbd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -61,8 +61,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.FutureUtils; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Predicates; @@ -126,8 +124,6 @@ import java.util.TreeMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -148,6 +144,7 @@ import static org.elasticsearch.node.Node.INITIAL_STATE_TIMEOUT_SETTING; import static org.elasticsearch.test.ESTestCase.assertBusy; import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.runInParallel; import static org.elasticsearch.test.ESTestCase.safeAwait; import static org.elasticsearch.test.NodeRoles.dataOnlyNode; import static org.elasticsearch.test.NodeRoles.masterOnlyNode; @@ -246,8 +243,6 @@ public String toString() { private final NodeConfigurationSource nodeConfigurationSource; - private final ExecutorService executor; - private final boolean autoManageMasterNodes; private final Collection<Class<? extends Plugin>> mockPlugins; @@ -452,16 +447,6 @@ public InternalTestCluster( builder.put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), randomFrom(random, "write", "metadata_write")); builder.put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), false); defaultSettings = builder.build(); - executor = EsExecutors.newScaling( - "internal_test_cluster_executor", - 0, - Integer.MAX_VALUE, - 0, - TimeUnit.SECONDS, - true, - EsExecutors.daemonThreadFactory("test_" + clusterName), - new ThreadContext(Settings.EMPTY) - ); } /** @@ -931,7 +916,6 @@ public synchronized void close() throws IOException { } finally { nodes = Collections.emptyNavigableMap(); Loggers.setLevel(nodeConnectionLogger, initialLogLevel); - executor.shutdownNow(); } } } @@ -1760,18 +1744,10 @@
private synchronized void startAndPublishNodesAndClients(List<NodeAndClient> nod .filter(nac -> nodes.containsKey(nac.name) == false) // filter out old masters .count(); rebuildUnicastHostFiles(nodeAndClients); // ensure that new nodes can find the existing nodes when they start - List<Future<?>> futures = nodeAndClients.stream().map(node -> executor.submit(node::startNode)).collect(Collectors.toList()); try { - for (Future<?> future : futures) { - future.get(); - } + runInParallel(nodeAndClients.size(), i -> nodeAndClients.get(i).startNode()); } catch (InterruptedException e) { throw new AssertionError("interrupted while starting nodes", e); - } catch (ExecutionException e) { - RuntimeException re = FutureUtils.rethrowExecutionException(e); - re.addSuppressed(new RuntimeException("failed to start nodes")); - throw re; } nodeAndClients.forEach(this::publishNode);
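To make the new API concrete, here is a hypothetical caller (a made-up test method; it assumes the `ESTestCase` scope, where `runInParallel` and `safeAwait` are defined):

```java
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;

public class RunInParallelExampleTests extends ESTestCase { // hypothetical test class
    public void testTasksRunConcurrently() throws Exception {
        final int tasks = 5;
        final AtomicInteger counter = new AtomicInteger();
        final CyclicBarrier barrier = new CyclicBarrier(tasks);
        // Task 4 runs on the calling thread; tasks 0-3 each get a new thread.
        // runInParallel joins the spawned threads and rethrows any task failure
        // as an AssertionError, so no manual start/join bookkeeping is needed.
        runInParallel(tasks, i -> {
            safeAwait(barrier); // line all the tasks up first, as the migrated tests do
            counter.incrementAndGet();
        });
        assertEquals(tasks, counter.get());
    }
}
```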