diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/JavaModulePrecommitTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/JavaModulePrecommitTask.java index cee10cf70b19c..d031113550295 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/JavaModulePrecommitTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/JavaModulePrecommitTask.java @@ -116,8 +116,9 @@ private void checkModuleVersion(ModuleReference mref) { private void checkModuleNamePrefix(ModuleReference mref) { getLogger().info("{} checking module name prefix for {}", this, mref.descriptor().name()); - if (mref.descriptor().name().startsWith("org.elasticsearch.") == false) { - throw new GradleException("Expected name starting with \"org.elasticsearch.\", in " + mref.descriptor()); + if (mref.descriptor().name().startsWith("org.elasticsearch.") == false + && mref.descriptor().name().startsWith("co.elastic.") == false) { + throw new GradleException("Expected name starting with \"org.elasticsearch.\" or \"co.elastic\" in " + mref.descriptor()); } } diff --git a/docs/changelog/96515.yaml b/docs/changelog/96515.yaml new file mode 100644 index 0000000000000..bf6403f6990ce --- /dev/null +++ b/docs/changelog/96515.yaml @@ -0,0 +1,5 @@ +pr: 96515 +summary: Support boxplot aggregation in transform +area: Transform +type: enhancement +issues: [] diff --git a/docs/changelog/97683.yaml b/docs/changelog/97683.yaml new file mode 100644 index 0000000000000..8b843eb7e9cf6 --- /dev/null +++ b/docs/changelog/97683.yaml @@ -0,0 +1,5 @@ +pr: 97683 +summary: Refactor nested field handling in `FieldFetcher` +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/97840.yaml b/docs/changelog/97840.yaml new file mode 100644 index 0000000000000..3bcf62328c873 --- /dev/null +++ b/docs/changelog/97840.yaml @@ -0,0 +1,6 @@ +pr: 97840 +summary: Improve exception handling in Coordinator#publish +area: Cluster Coordination +type: bug +issues: + - 97798 diff --git a/docs/reference/aggregations/metrics/geoline-aggregation.asciidoc b/docs/reference/aggregations/metrics/geoline-aggregation.asciidoc index 4ed684aa090d3..aabe8d172e4a0 100644 --- a/docs/reference/aggregations/metrics/geoline-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/geoline-aggregation.asciidoc @@ -1,8 +1,8 @@ [role="xpack"] [[search-aggregations-metrics-geo-line]] -=== Geo-Line Aggregation +=== Geo-line aggregation ++++ -Geo-Line +Geo-line ++++ The `geo_line` aggregation aggregates all `geo_point` values within a bucket into a `LineString` ordered @@ -77,13 +77,12 @@ Which returns: The resulting https://tools.ietf.org/html/rfc7946#section-3.2[GeoJSON Feature] contains both a `LineString` geometry for the path generated by the aggregation, as well as a map of `properties`. The property `complete` informs of whether all documents matched were used to generate the geometry. -The `size` option described below can be used to limit the number of documents included in the aggregation, +The <> can be used to limit the number of documents included in the aggregation, leading to results with `complete: false`. -Exactly which documents are dropped from results depends on whether the aggregation is based -on `time_series` or not, and this is discussed in -<>. +Exactly which documents are dropped from results <>. 
-The above result could be displayed in a map user interface: +This result could be displayed in a map user interface: image:images/spatial/geo_line.png[Kibana map with museum tour of Amsterdam] @@ -132,18 +131,19 @@ feature properties. The line is sorted in ascending order by the sort key when set to "ASC", and in descending with "DESC". +[[search-aggregations-metrics-geo-line-size]] `size`:: (Optional, integer, default: `10000`) The maximum length of the line represented in the aggregation. Valid sizes are between one and 10000. Within <> the aggregation uses line simplification to constrain the size, otherwise it uses truncation. -See <> +Refer to <> for a discussion on the subtleties involved. [[search-aggregations-metrics-geo-line-grouping]] ==== Grouping -The simple example above will produce a single track for all the data selected by the query. However, it is far more +This simple example produces a single track for all the data selected by the query. However, it is far more common to need to group the data into multiple tracks. For example, grouping flight transponder measurements by flight call-sign before sorting each flight by timestamp and producing a separate track for each. @@ -210,7 +210,7 @@ POST /tour/_bulk?refresh [[search-aggregations-metrics-geo-line-grouping-terms]] ==== Grouping with terms -Using the above data, for a non-time-series use case, the grouping can be done using a +Using this data, for a non-time-series use case, the grouping can be done using a <> based on city name. This would work whether or not we had defined the `tour` index as a time series index. @@ -294,17 +294,19 @@ Which returns: ---- // TESTRESPONSE -The above results contain an array of buckets, where each bucket is a JSON object with the `key` showing the name +These results contain an array of buckets, where each bucket is a JSON object with the `key` showing the name of the `city` field, and an inner aggregation result called `museum_tour` containing a https://tools.ietf.org/html/rfc7946#section-3.2[GeoJSON Feature] describing the actual route between the various attractions in that city. Each result also includes a `properties` object with a `complete` value which will be `false` if the geometry was truncated to the limits specified in the `size` parameter. -Note that when we use `time_series` in the example below, we will get the same results structured a little differently. +Note that when we use `time_series` in the next example, we will get the same results structured a little differently. [[search-aggregations-metrics-geo-line-grouping-time-series]] ==== Grouping with time-series +preview::[] + Using the same data as before, we can also perform the grouping with a <>. This will group by TSID, which is defined as the combinations of all fields with `time_series_dimension: true`, @@ -337,7 +339,7 @@ NOTE: The `geo_line` aggregation no longer requires the `sort` field when nested This is because the sort field is set to `@timestamp`, which all time-series indexes are pre-sorted by. If you do set this parameter, and set it to something other than `@timestamp` you will get an error. -The above query will result in: +This query will result in: [source,js] ---- @@ -400,7 +402,7 @@ The above query will result in: ---- // TESTRESPONSE -The above results are essentially the same as with the previous `terms` aggregation example, but structured differently. +These results are essentially the same as with the previous `terms` aggregation example, but structured differently. 
Here we see the buckets returned as a map, where the key is an internal description of the TSID. This TSID is unique for each unique combination of fields with `time_series_dimension: true`. Each bucket contains a `key` field which is also a map of all dimension values for the TSID, in this case only the city @@ -414,7 +416,7 @@ was simplified to the limits specified in the `size` parameter. [[search-aggregations-metrics-geo-line-grouping-time-series-advantages]] ==== Why group with time-series? -When reviewing the above examples, you might think that there is little difference between using +When reviewing these examples, you might think that there is little difference between using <> or <> to group the geo-lines. However, there are some important differences in behaviour between the two cases. diff --git a/docs/reference/how-to/size-your-shards.asciidoc b/docs/reference/how-to/size-your-shards.asciidoc index a94d0d0161a01..8b631dbbaa5ce 100644 --- a/docs/reference/how-to/size-your-shards.asciidoc +++ b/docs/reference/how-to/size-your-shards.asciidoc @@ -140,20 +140,21 @@ Every new backing index is an opportunity to further tune your strategy. [discrete] [[shard-size-recommendation]] -==== Aim for shard sizes between 10GB and 50GB - -Larger shards take longer to recover after a failure. When a node fails, {es} -rebalances the node's shards across the data tier's remaining nodes. This -recovery process typically involves copying the shard contents across the -network, so a 100GB shard will take twice as long to recover than a 50GB shard. -In contrast, small shards carry proportionally more overhead and are less -efficient to search. Searching fifty 1GB shards will take substantially more -resources than searching a single 50GB shard containing the same data. - -There are no hard limits on shard size, but experience shows that shards -between 10GB and 50GB typically work well for logs and time series data. You -may be able to use larger shards depending on your network and use case. -Smaller shards may be appropriate for +==== Aim for shards of up to 200M documents, or with sizes between 10GB and 50GB + +There is some overhead associated with each shard, both in terms of cluster +management and search performance. Searching a thousand 50MB shards will be +substantially more expensive than searching a single 50GB shard containing the +same data. However, very large shards can also cause slower searches and will +take longer to recover after a failure. + +There is no hard limit on the physical size of a shard, and each shard can in +theory contain up to just over two billion documents. However, experience shows +that shards between 10GB and 50GB typically work well for many use cases, as +long as the per-shard document count is kept below 200 million. + +You may be able to use larger shards depending on your network and use case, +and smaller shards may be appropriate for {enterprise-search-ref}/index.html[Enterprise Search] and similar use cases. 
If you use {ilm-init}, set the <>'s diff --git a/docs/reference/migration/apis/feature-migration.asciidoc b/docs/reference/migration/apis/feature-migration.asciidoc index 87903fbb7758e..9a6306dc2f596 100644 --- a/docs/reference/migration/apis/feature-migration.asciidoc +++ b/docs/reference/migration/apis/feature-migration.asciidoc @@ -142,7 +142,7 @@ Example response: "migration_status" : "NO_MIGRATION_NEEDED" } -------------------------------------------------- -// TESTRESPONSE[s/"minimum_index_version" : "8100099"/"minimum_index_version" : $body.$_path/] +// TESTRESPONSE[skip:"AwaitsFix https://github.com/elastic/elasticsearch/issues/97780] When you submit a POST request to the `_migration/system_features` endpoint to start the migration process, the response indicates what features will be diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 6c33b9db4c597..afba5e18c1631 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -767,6 +767,7 @@ currently supported: + -- * <> +* <> * <> * <> * <> diff --git a/docs/reference/scripting/security.asciidoc b/docs/reference/scripting/security.asciidoc index 2726938cb179d..0f322d08726b9 100644 --- a/docs/reference/scripting/security.asciidoc +++ b/docs/reference/scripting/security.asciidoc @@ -36,7 +36,7 @@ configured to run both types of scripts. To limit what type of scripts are run, set `script.allowed_types` to `inline` or `stored`. To prevent any scripts from running, set `script.allowed_types` to `none`. -IMPORTANT: If you use {kib}, set `script.allowed_types` to `both` or `inline`. +IMPORTANT: If you use {kib}, set `script.allowed_types` to both or just `inline`. Some {kib} features rely on inline scripts and do not function as expected if {es} does not allow inline scripts. 
diff --git a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainResponse.java b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainResponse.java index b4d127ffd39d9..116476225027f 100644 --- a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainResponse.java +++ b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/MainResponse.java @@ -131,7 +131,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("cluster_uuid", clusterUuid); builder.startObject("version") .field("number", build.qualifiedVersion()) - .field("build_flavor", "default") + .field("build_flavor", build.flavor()) .field("build_type", build.type().displayName()) .field("build_hash", build.hash()) .field("build_date", build.date()) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java index 1f86d4cb39ea4..32a1d6724e0fd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/FeatureStateResetApiIT.java @@ -48,6 +48,7 @@ protected Collection> nodePlugins() { } /** Check that the reset method cleans up a feature */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97780") public void testResetSystemIndices() throws Exception { String systemIndex1 = ".test-system-idx-1"; String systemIndex2 = ".second-test-system-idx-1"; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 30b3e8c6e3c00..99134da6bc216 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -541,15 +541,25 @@ static ReducedQueryPhase reducedQueryPhase( ); } int total = queryResults.size(); - queryResults = queryResults.stream().filter(res -> res.queryResult().isNull() == false).toList(); - String errorMsg = "must have at least one non-empty search result, got 0 out of " + total; - assert queryResults.isEmpty() == false : errorMsg; - if (queryResults.isEmpty()) { - throw new IllegalStateException(errorMsg); + final Collection nonNullResults = new ArrayList<>(); + boolean hasSuggest = false; + boolean hasProfileResults = false; + for (SearchPhaseResult queryResult : queryResults) { + var res = queryResult.queryResult(); + if (res.isNull()) { + continue; + } + hasSuggest |= res.suggest() != null; + hasProfileResults |= res.hasProfileResults(); + nonNullResults.add(queryResult); } + queryResults = nonNullResults; validateMergeSortValueFormats(queryResults); - final boolean hasSuggest = queryResults.stream().anyMatch(res -> res.queryResult().suggest() != null); - final boolean hasProfileResults = queryResults.stream().anyMatch(res -> res.queryResult().hasProfileResults()); + if (queryResults.isEmpty()) { + var ex = new IllegalStateException("must have at least one non-empty search result, got 0 out of " + total); + assert false : ex; + throw ex; + } // count the total (we use the query result provider here, since we might not get any hits (we scrolled past them)) final Map>> groupedSuggestions = hasSuggest ? 
new HashMap<>() : Collections.emptyMap(); @@ -578,9 +588,7 @@ static ReducedQueryPhase reducedQueryPhase( } } } - if (bufferedTopDocs.isEmpty() == false) { - assert result.hasConsumedTopDocs() : "firstResult has no aggs but we got non null buffered aggs?"; - } + assert bufferedTopDocs.isEmpty() || result.hasConsumedTopDocs() : "firstResult has no aggs but we got non null buffered aggs?"; if (hasProfileResults) { String key = result.getSearchShardTarget().toString(); profileShardResults.put(key, result.consumeProfileResult()); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index c8781809af8be..482080f2b2c23 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -995,6 +995,11 @@ public void execute(ClusterState currentState) { allocationService.cleanCaches(); } } + + @Override + public String toString() { + return "cleanMasterService"; + } }.submit(masterService, "clean-up after stepping down as master"); } @@ -1518,82 +1523,98 @@ public void publish( clusterStatePublicationEvent.getNewState().term() ) ); - publishListener.onFailure( - new FailedToCommitClusterStateException( - "node is no longer master for term " - + clusterStatePublicationEvent.getNewState().term() - + " while handling publication" - ) + throw new FailedToCommitClusterStateException( + "node is no longer master for term " + + clusterStatePublicationEvent.getNewState().term() + + " while handling publication" ); - return; } if (currentPublication.isPresent()) { assert false : "[" + currentPublication.get() + "] in progress, cannot start new publication"; - logger.warn( + logger.error( () -> format( "[%s] failed publication as already publication in progress", clusterStatePublicationEvent.getSummary() ) ); - publishListener.onFailure( - new FailedToCommitClusterStateException("publication " + currentPublication.get() + " already in progress") - ); - return; + throw new FailedToCommitClusterStateException("publication " + currentPublication.get() + " already in progress"); } assert assertPreviousStateConsistency(clusterStatePublicationEvent); - final ClusterState clusterState = clusterStatePublicationEvent.getNewState(); + final ClusterState clusterState; + final long publicationContextConstructionStartMillis; + final PublicationTransportHandler.PublicationContext publicationContext; + final PublishRequest publishRequest; - assert getLocalNode().equals(clusterState.getNodes().get(getLocalNode().getId())) - : getLocalNode() + " should be in published " + clusterState; - - final long publicationContextConstructionStartMillis = transportService.getThreadPool().rawRelativeTimeInMillis(); - final PublicationTransportHandler.PublicationContext publicationContext = publicationHandler.newPublicationContext( - clusterStatePublicationEvent - ); try { - clusterStatePublicationEvent.setPublicationContextConstructionElapsedMillis( - transportService.getThreadPool().rawRelativeTimeInMillis() - publicationContextConstructionStartMillis - ); + clusterState = clusterStatePublicationEvent.getNewState(); + assert getLocalNode().equals(clusterState.getNodes().get(getLocalNode().getId())) + : getLocalNode() + " should be in published " + clusterState; + publicationContextConstructionStartMillis = transportService.getThreadPool().rawRelativeTimeInMillis(); + publicationContext = 
publicationHandler.newPublicationContext(clusterStatePublicationEvent); + } catch (Exception e) { + logger.debug(() -> "[" + clusterStatePublicationEvent.getSummary() + "] publishing failed during context creation", e); + becomeCandidate("publication context creation"); + throw new FailedToCommitClusterStateException("publishing failed during context creation", e); + } - final PublishRequest publishRequest = coordinationState.get().handleClientValue(clusterState); - final CoordinatorPublication publication = new CoordinatorPublication( - clusterStatePublicationEvent, - publishRequest, - publicationContext, - new ListenableFuture<>(), - ackListener, - publishListener - ); - currentPublication = Optional.of(publication); + try (Releasable ignored = publicationContext::decRef) { + try { + clusterStatePublicationEvent.setPublicationContextConstructionElapsedMillis( + transportService.getThreadPool().rawRelativeTimeInMillis() - publicationContextConstructionStartMillis + ); + publishRequest = coordinationState.get().handleClientValue(clusterState); + } catch (Exception e) { + logger.warn( + "failed to start publication of state version [" + + clusterState.version() + + "] in term [" + + clusterState.term() + + "] for [" + + clusterStatePublicationEvent.getSummary() + + "]", + e + ); + becomeCandidate("publication creation"); + throw new FailedToCommitClusterStateException("publishing failed while starting", e); + } - final DiscoveryNodes publishNodes = publishRequest.getAcceptedState().nodes(); - leaderChecker.setCurrentNodes(publishNodes); - followersChecker.setCurrentNodes(publishNodes); - lagDetector.setTrackedNodes(publishNodes); - publication.start(followersChecker.getFaultyNodes()); - } catch (Exception e) { - logger.warn( - "failed to start publication of state version [" - + clusterState.version() - + "] in term [" - + clusterState.term() - + "] for [" - + clusterStatePublicationEvent.getSummary() - + "]", - e - ); - assert currentPublication.isEmpty() : e; // should not fail after setting currentPublication - becomeCandidate("publish"); - } finally { - publicationContext.decRef(); + try { + final var publication = new CoordinatorPublication( + clusterStatePublicationEvent, + publishRequest, + publicationContext, + new ListenableFuture<>(), + ackListener, + publishListener + ); + currentPublication = Optional.of(publication); + + final var publishNodes = publishRequest.getAcceptedState().nodes(); + leaderChecker.setCurrentNodes(publishNodes); + followersChecker.setCurrentNodes(publishNodes); + lagDetector.setTrackedNodes(publishNodes); + publication.start(followersChecker.getFaultyNodes()); + } catch (Exception e) { + assert false : e; + if (currentPublication.isEmpty()) { + // log an error and fail the listener + throw new IllegalStateException(e); + } else { + // becoming candidate will clean up the publication, completing the listener + becomeCandidate("publication start"); + } + } } } + } catch (FailedToCommitClusterStateException failedToCommitClusterStateException) { + publishListener.onFailure(failedToCommitClusterStateException); } catch (Exception e) { - logger.debug(() -> "[" + clusterStatePublicationEvent.getSummary() + "] publishing failed", e); - publishListener.onFailure(new FailedToCommitClusterStateException("publishing failed", e)); + assert false : e; // all exceptions should already be caught and wrapped in a FailedToCommitClusterStateException + logger.error(() -> "[" + clusterStatePublicationEvent.getSummary() + "] publishing unexpectedly failed", e); + 
publishListener.onFailure(new FailedToCommitClusterStateException("publishing unexpectedly failed", e)); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/stateless/StoreHeartbeatService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/stateless/StoreHeartbeatService.java index d68ebeb871501..6555054fd9973 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/stateless/StoreHeartbeatService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/stateless/StoreHeartbeatService.java @@ -103,7 +103,7 @@ public void onResponse(Heartbeat heartBeat) { || maxTimeSinceLastHeartbeat.millis() <= heartBeat.timeSinceLastHeartbeatInMillis(absoluteTimeInMillis())) { runnable.run(); } else { - logger.trace("runIfNoRecentLeader: found recent leader"); + logger.trace("runIfNoRecentLeader: found recent leader [{}]", heartBeat); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedLookup.java index a227c1f946ea4..911c6e5382a86 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedLookup.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.Query; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -42,6 +43,12 @@ public interface NestedLookup { */ String getNestedParent(String path); + /** + * Given a nested object path, returns a list of paths of its + * immediate children + */ + List getImmediateChildMappers(String path); + /** * A NestedLookup for a mapping with no nested mappers */ @@ -60,6 +67,11 @@ public Map getNestedParentFilters() { public String getNestedParent(String path) { return null; } + + @Override + public List getImmediateChildMappers(String path) { + return List.of(); + } }; /** @@ -84,6 +96,7 @@ static NestedLookup build(List mappers) { previous = mapper; } List nestedPathNames = mappers.stream().map(NestedObjectMapper::name).toList(); + return new NestedLookup() { @Override @@ -98,6 +111,9 @@ public Map getNestedParentFilters() { @Override public String getNestedParent(String path) { + if (path.contains(".") == false) { + return null; + } String parent = null; for (String parentPath : nestedPathNames) { if (path.startsWith(parentPath + ".")) { @@ -108,6 +124,33 @@ public String getNestedParent(String path) { } return parent; } + + @Override + public List getImmediateChildMappers(String path) { + String prefix = "".equals(path) ? 
"" : path + "."; + List childMappers = new ArrayList<>(); + int parentPos = Collections.binarySearch(nestedPathNames, path); + if (parentPos < -1 || parentPos >= nestedPathNames.size() - 1) { + return List.of(); + } + int i = parentPos + 1; + String lastChild = nestedPathNames.get(i); + if (lastChild.startsWith(prefix)) { + childMappers.add(lastChild); + } + i++; + while (i < nestedPathNames.size() && nestedPathNames.get(i).startsWith(prefix)) { + if (nestedPathNames.get(i).startsWith(lastChild + ".")) { + // child of child, skip + i++; + continue; + } + lastChild = nestedPathNames.get(i); + childMappers.add(lastChild); + i++; + } + return childMappers; + } }; } } diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index e0ee2f178e4ef..43566392e5b6a 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -136,6 +136,7 @@ final class DefaultSearchContext extends SearchContext { SearchShardTarget shardTarget, LongSupplier relativeTimeSupplier, TimeValue timeout, + int minimumDocsPerSlice, FetchPhase fetchPhase, boolean lowLevelCancellation ) throws IOException { @@ -153,7 +154,9 @@ final class DefaultSearchContext extends SearchContext { engineSearcher.getSimilarity(), engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy(), - lowLevelCancellation + minimumDocsPerSlice, + lowLevelCancellation, + null ); releasables.addAll(List.of(engineSearcher, searcher)); diff --git a/server/src/main/java/org/elasticsearch/search/NestedUtils.java b/server/src/main/java/org/elasticsearch/search/NestedUtils.java new file mode 100644 index 0000000000000..714ae87ec01f0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/NestedUtils.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +/** + * Utility methods for dealing with nested mappers + */ +public final class NestedUtils { + + private NestedUtils() {} + + /** + * Partition a set of input objects by the children of a specific nested scope + * + * The returned map will contain an entry for all children, even if some of them + * are empty in the inputs. + * + * All children, and all input paths, must begin with the scope. Both children + * and inputs should be in sorted order. 
+ * + * @param scope the nested scope to base partitions on + * @param children the immediate children of the nested scope + * @param inputs a set of inputs to partition + * @param pathFunction a function to retrieve a path for each input + * @param the type of the inputs + * @return a map of nested paths to lists of inputs + */ + public static Map> partitionByChildren( + String scope, + List children, + List inputs, + Function pathFunction + ) { + // No immediate nested children, so we can shortcut and just return all inputs + // under the current scope + if (children.isEmpty()) { + return Map.of(scope, inputs); + } + + // Set up the output map, with one entry for the current scope and one for each + // of its children + Map> output = new HashMap<>(); + output.put(scope, new ArrayList<>()); + for (String child : children) { + output.put(child, new ArrayList<>()); + } + + // No inputs, so we can return the output map with all entries empty + if (inputs.isEmpty()) { + return output; + } + + Iterator childrenIterator = children.iterator(); + String currentChild = childrenIterator.next(); + Iterator inputIterator = inputs.iterator(); + T currentInput = inputIterator.next(); + String currentInputName = pathFunction.apply(currentInput); + assert currentInputName.startsWith(scope); + + // Find all the inputs that sort before the first child, and add them to the current scope entry + while (currentInputName.compareTo(currentChild) < 0) { + output.get(scope).add(currentInput); + if (inputIterator.hasNext() == false) { + return output; + } + currentInput = inputIterator.next(); + currentInputName = pathFunction.apply(currentInput); + assert currentInputName.startsWith(scope); + } + + // Iterate through all the children + while (currentChild != null) { + if (currentInputName.startsWith(currentChild + ".")) { + // If this input sits under the current child, add it to that child scope + // and then get the next input + output.get(currentChild).add(currentInput); + if (inputIterator.hasNext() == false) { + // return if no more inputs + return output; + } + currentInput = inputIterator.next(); + currentInputName = pathFunction.apply(currentInput); + assert currentInputName.startsWith(scope); + } else { + // If there are no more children then skip to filling up the parent scope again + if (childrenIterator.hasNext() == false) { + break; + } + // Move to the next child + currentChild = childrenIterator.next(); + if (currentChild == null || currentInputName.compareTo(currentChild) < 0) { + // If we still sort before the next child, then add to the parent scope + // and move to the next input + output.get(scope).add(currentInput); + if (inputIterator.hasNext() == false) { + // if no more inputs then return + return output; + } + currentInput = inputIterator.next(); + currentInputName = pathFunction.apply(currentInput); + assert currentInputName.startsWith(scope); + } + } + } + output.get(scope).add(currentInput); + + // if there are inputs left, then they all sort after the last child but + // are not contained by them, so just add them all to the parent scope + while (inputIterator.hasNext()) { + currentInput = inputIterator.next(); + currentInputName = pathFunction.apply(currentInput); + assert currentInputName.startsWith(scope); + output.get(scope).add(currentInput); + } + return output; + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 643aa6cff272e..7e662acff319a 100644 --- 
a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -202,6 +202,14 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv Property.NodeScope ); + // This setting is only registered on tests to force concurrent search even when segments contains very few documents. + public static final Setting MINIMUM_DOCS_PER_SLICE = Setting.intSetting( + "search.minimum_docs_per_slice", + 50_000, + 1, + Property.NodeScope + ); + public static final Setting MAX_OPEN_SCROLL_CONTEXT = Setting.intSetting( "search.max_open_scroll_context", 500, @@ -251,6 +259,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private volatile TimeValue defaultSearchTimeout; + private final int minimumDocsPerSlice; + private volatile boolean defaultAllowPartialSearchResults; private volatile boolean lowLevelCancellation; @@ -316,6 +326,8 @@ public SearchService( defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings); clusterService.getClusterSettings().addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout); + minimumDocsPerSlice = MINIMUM_DOCS_PER_SLICE.get(settings); + defaultAllowPartialSearchResults = DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS.get(settings); clusterService.getClusterSettings() .addSettingsUpdateConsumer(DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS, this::setDefaultAllowPartialSearchResults); @@ -1037,6 +1049,7 @@ private DefaultSearchContext createSearchContext(ReaderContext reader, ShardSear shardTarget, threadPool::relativeTimeInMillis, timeout, + minimumDocsPerSlice, fetchPhase, lowLevelCancellation ); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java index ba1d0b1697c05..afc9fc26a1338 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldFetcher.java @@ -9,29 +9,26 @@ package org.elasticsearch.search.fetch.subphase; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedValueFetcher; import org.elasticsearch.index.mapper.ValueFetcher; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.NestedUtils; import org.elasticsearch.search.fetch.StoredFieldsSpec; import org.elasticsearch.search.lookup.Source; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.Supplier; -import java.util.stream.Collectors; /** * A helper class to {@link FetchFieldsPhase} that's initialized with a list of field patterns to fetch. @@ -39,34 +36,19 @@ */ public class FieldFetcher { + private record ResolvedField(String field, String matchingPattern, MappedFieldType ft, String format) {} + /** - * Default maximum number of states in the automaton that looks up unmapped fields. 
+ * Build a FieldFetcher for a given search context and collection of fields and formats */ - private static final int AUTOMATON_MAX_DETERMINIZED_STATES = 100000; - public static FieldFetcher create(SearchExecutionContext context, Collection fieldAndFormats) { - Set nestedMappingPaths = context.nestedLookup().getNestedMappers().keySet(); - return create(context, fieldAndFormats, nestedMappingPaths, ""); - } - - private static FieldFetcher create( - SearchExecutionContext context, - Collection fieldAndFormats, - Set nestedMappingsInScope, - String nestedScopePath - ) { - // here we only need the nested paths that are closest to the root, e.g. only "foo" if also "foo.bar" is present. - // the remaining nested field paths are handled recursively - Set nestedParentPaths = getParentPaths(nestedMappingsInScope, context); - // Using a LinkedHashMap so fields are returned in the order requested. - // We won't formally guarantee this, but it's good for readability of the response - Map fieldContexts = new LinkedHashMap<>(); List unmappedFetchPattern = new ArrayList<>(); + List resolvedFields = new ArrayList<>(); for (FieldAndFormat fieldAndFormat : fieldAndFormats) { String fieldPattern = fieldAndFormat.field; - boolean isWildcardPattern = Regex.isSimpleMatchPattern(fieldPattern); + String matchingPattern = Regex.isSimpleMatchPattern(fieldPattern) ? fieldPattern : null; if (fieldAndFormat.includeUnmapped != null && fieldAndFormat.includeUnmapped) { unmappedFetchPattern.add(fieldAndFormat.field); } @@ -74,95 +56,114 @@ private static FieldFetcher create( for (String field : context.getMatchingFieldNames(fieldPattern)) { MappedFieldType ft = context.getFieldType(field); // we want to skip metadata fields if we have a wildcard pattern - if (context.isMetadataField(field) && isWildcardPattern) { - continue; - } - if (field.startsWith(nestedScopePath) == false) { - // this field is out of scope for this FieldFetcher (its likely nested) so ignore + if (context.isMetadataField(field) && matchingPattern != null) { continue; } - String nestedParentPath = null; - if (nestedParentPaths.isEmpty() == false) { - // try to find the shortest nested parent path for this field - for (String nestedFieldPath : nestedParentPaths) { - if (field.startsWith(nestedFieldPath) - && field.length() > nestedFieldPath.length() - && field.charAt(nestedFieldPath.length()) == '.') { - nestedParentPath = nestedFieldPath; - break; - } - } - } - // only add concrete fields if they are not beneath a known nested field - if (nestedParentPath == null) { - ValueFetcher valueFetcher; - try { - valueFetcher = ft.valueFetcher(context, fieldAndFormat.format); - } catch (IllegalArgumentException e) { - StringBuilder error = new StringBuilder("error fetching [").append(field).append(']'); - if (isWildcardPattern) { - error.append(" which matched [").append(fieldAndFormat.field).append(']'); - } - error.append(": ").append(e.getMessage()); - throw new IllegalArgumentException(error.toString(), e); - } - fieldContexts.put(field, new FieldContext(field, valueFetcher)); - } + resolvedFields.add(new ResolvedField(field, matchingPattern, ft, fieldAndFormat.format)); } } - // create a new nested value fetcher for patterns under nested field - for (String nestedFieldPath : nestedParentPaths) { - // We construct a field fetcher that narrows the allowed lookup scope to everything beneath its nested field path. 
- // We also need to remove this nested field path and everything beneath it from the list of available nested fields before - // creating this internal field fetcher to avoid infinite loops on this recursion - Set narrowedScopeNestedMappings = nestedMappingsInScope.stream() - .filter(s -> nestedParentPaths.contains(s) == false) - .collect(Collectors.toSet()); + // The fields need to be sorted so that the nested partition functions will work correctly. + resolvedFields.sort(Comparator.comparing(f -> f.field)); + + Map fieldContexts = buildFieldContexts(context, "", resolvedFields, unmappedFetchPattern); + + UnmappedFieldFetcher unmappedFieldFetcher = buildUnmappedFieldFetcher(context, fieldContexts.keySet(), "", unmappedFetchPattern); - FieldFetcher nestedSubFieldFetcher = FieldFetcher.create( - context, - fieldAndFormats, - narrowedScopeNestedMappings, - nestedFieldPath - ); + return new FieldFetcher(fieldContexts, unmappedFieldFetcher); + } + + private static UnmappedFieldFetcher buildUnmappedFieldFetcher( + SearchExecutionContext context, + Set mappedFields, + String nestedScope, + List unmappedFetchPatterns + ) { + if (unmappedFetchPatterns.isEmpty()) { + return UnmappedFieldFetcher.EMPTY; + } + // We pass in all mapped field names, and all the names of nested mappers that appear + // immediately below the current scope. This means that the unmapped field fetcher won't + // retrieve any fields that live inside a nested child, instead leaving this to the + // NestedFieldFetchers defined for each child scope in buildFieldContexts() + Set mappedAndNestedFields = new HashSet<>(mappedFields); + mappedAndNestedFields.addAll(context.nestedLookup().getImmediateChildMappers(nestedScope)); + return new UnmappedFieldFetcher(mappedAndNestedFields, unmappedFetchPatterns); + } - // add a special ValueFetcher that filters source and collects its subfields - fieldContexts.put( - nestedFieldPath, - new FieldContext(nestedFieldPath, new NestedValueFetcher(nestedFieldPath, nestedSubFieldFetcher)) - ); + private static ValueFetcher buildValueFetcher(SearchExecutionContext context, ResolvedField fieldAndFormat) { + try { + return fieldAndFormat.ft.valueFetcher(context, fieldAndFormat.format); + } catch (IllegalArgumentException e) { + StringBuilder error = new StringBuilder("error fetching [").append(fieldAndFormat.field).append(']'); + if (fieldAndFormat.matchingPattern != null) { + error.append(" which matched [").append(fieldAndFormat.matchingPattern).append(']'); + } + error.append(": ").append(e.getMessage()); + throw new IllegalArgumentException(error.toString(), e); } + } + + // Builds field contexts for each resolved field. If there are child mappers below + // the nested scope, then the resolved fields are partitioned by where they fall in + // the nested hierarchy, and we build a nested FieldContext for each child by calling + // this method again for the subset of resolved fields that live within it. 
+ private static Map buildFieldContexts( + SearchExecutionContext context, + String nestedScope, + List fields, + List unmappedFetchPatterns + ) { - CharacterRunAutomaton unmappedFieldsFetchAutomaton = null; - // We separate the "include_unmapped" field patters with wildcards from the rest in order to use less - // space in the lookup automaton - Map> partitions = unmappedFetchPattern.stream() - .collect(Collectors.partitioningBy((s -> Regex.isSimpleMatchPattern(s)))); - List unmappedWildcardPattern = partitions.get(true); - List unmappedConcreteFields = partitions.get(false); - if (unmappedWildcardPattern.isEmpty() == false) { - unmappedFieldsFetchAutomaton = new CharacterRunAutomaton( - Regex.simpleMatchToAutomaton(unmappedWildcardPattern.toArray(new String[unmappedWildcardPattern.size()])), - AUTOMATON_MAX_DETERMINIZED_STATES - ); + final boolean includeUnmapped = unmappedFetchPatterns.isEmpty() == false; + + Map> fieldsByNestedMapper = NestedUtils.partitionByChildren( + nestedScope, + context.nestedLookup().getImmediateChildMappers(nestedScope), + fields, + f -> f.field + ); + + // Keep the outputs sorted for easier testing + Map output = new LinkedHashMap<>(); + for (String scope : fieldsByNestedMapper.keySet()) { + if (nestedScope.equals(scope)) { + // These are fields in the current scope, so add them directly to the output map + for (ResolvedField ff : fieldsByNestedMapper.get(nestedScope)) { + output.put(ff.field, new FieldContext(ff.field, buildValueFetcher(context, ff))); + } + } else { + // don't create nested fetchers if no children have been requested as part of the fields + // request, unless we are trying to also fetch unmapped fields`` + if (includeUnmapped || fieldsByNestedMapper.get(scope).isEmpty() == false) { + // These fields are in a child scope, so build a nested mapper for them + Map scopedFields = buildFieldContexts( + context, + scope, + fieldsByNestedMapper.get(scope), + unmappedFetchPatterns + ); + UnmappedFieldFetcher unmappedFieldFetcher = buildUnmappedFieldFetcher( + context, + scopedFields.keySet(), + scope, + unmappedFetchPatterns + ); + NestedValueFetcher nvf = new NestedValueFetcher(scope, new FieldFetcher(scopedFields, unmappedFieldFetcher)); + output.put(scope, new FieldContext(scope, nvf)); + } + } } - return new FieldFetcher(fieldContexts, unmappedFieldsFetchAutomaton, unmappedConcreteFields); + return output; } private final Map fieldContexts; - private final CharacterRunAutomaton unmappedFieldsFetchAutomaton; - private final List unmappedConcreteFields; + private final UnmappedFieldFetcher unmappedFieldFetcher; private final StoredFieldsSpec storedFieldsSpec; - private FieldFetcher( - Map fieldContexts, - @Nullable CharacterRunAutomaton unmappedFieldsFetchAutomaton, - @Nullable List unmappedConcreteFields - ) { + private FieldFetcher(Map fieldContexts, UnmappedFieldFetcher unmappedFieldFetcher) { this.fieldContexts = fieldContexts; - this.unmappedFieldsFetchAutomaton = unmappedFieldsFetchAutomaton; - this.unmappedConcreteFields = unmappedConcreteFields; + this.unmappedFieldFetcher = unmappedFieldFetcher; this.storedFieldsSpec = StoredFieldsSpec.build(fieldContexts.values(), fc -> fc.valueFetcher.storedFieldsSpec()); } @@ -180,118 +181,10 @@ public Map fetch(Source source, int doc) throws IOExcepti documentFields.put(field, docField); } } - collectUnmapped(documentFields, source::source, "", 0); + unmappedFieldFetcher.collectUnmapped(documentFields, source); return documentFields; } - private void collectUnmapped( - Map documentFields, - Supplier> 
source, - String parentPath, - int lastState - ) { - // lookup field patterns containing wildcards - if (this.unmappedFieldsFetchAutomaton != null) { - for (String key : source.get().keySet()) { - Object value = source.get().get(key); - String currentPath = parentPath + key; - if (this.fieldContexts.containsKey(currentPath)) { - continue; - } - int currentState = step(this.unmappedFieldsFetchAutomaton, key, lastState); - if (currentState == -1) { - // current path doesn't match any fields pattern - continue; - } - if (value instanceof Map) { - // one step deeper into source tree - @SuppressWarnings("unchecked") - Map objectMap = (Map) value; - collectUnmapped( - documentFields, - () -> objectMap, - currentPath + ".", - step(this.unmappedFieldsFetchAutomaton, ".", currentState) - ); - } else if (value instanceof List) { - // iterate through list values - collectUnmappedList(documentFields, (List) value, currentPath, currentState); - } else { - // we have a leaf value - if (this.unmappedFieldsFetchAutomaton.isAccept(currentState)) { - if (value != null) { - DocumentField currentEntry = documentFields.get(currentPath); - if (currentEntry == null) { - List list = new ArrayList<>(); - list.add(value); - documentFields.put(currentPath, new DocumentField(currentPath, list)); - } else { - currentEntry.getValues().add(value); - } - } - } - } - } - } - - // lookup concrete fields - if (this.unmappedConcreteFields != null) { - for (String path : unmappedConcreteFields) { - if (this.fieldContexts.containsKey(path)) { - continue; // this is actually a mapped field - } - List values = XContentMapValues.extractRawValues(path, source.get()); - if (values.isEmpty() == false) { - documentFields.put(path, new DocumentField(path, values)); - } - } - } - } - - private void collectUnmappedList(Map documentFields, Iterable iterable, String parentPath, int lastState) { - List list = new ArrayList<>(); - for (Object value : iterable) { - if (value instanceof Map) { - @SuppressWarnings("unchecked") - final Map objectMap = (Map) value; - collectUnmapped(documentFields, () -> objectMap, parentPath + ".", step(this.unmappedFieldsFetchAutomaton, ".", lastState)); - } else if (value instanceof List) { - // weird case, but can happen for objects with "enabled" : "false" - collectUnmappedList(documentFields, (List) value, parentPath, lastState); - } else if (this.unmappedFieldsFetchAutomaton.isAccept(lastState) && this.fieldContexts.containsKey(parentPath) == false) { - list.add(value); - } - } - if (list.isEmpty() == false) { - DocumentField currentEntry = documentFields.get(parentPath); - if (currentEntry == null) { - documentFields.put(parentPath, new DocumentField(parentPath, list)); - } else { - currentEntry.getValues().addAll(list); - } - } - } - - private static Set getParentPaths(Set nestedPathsInScope, SearchExecutionContext context) { - Set parentPaths = new HashSet<>(); - for (String candidate : nestedPathsInScope) { - String nestedParent = context.nestedLookup().getNestedParent(candidate); - // if the candidate has no nested parent itself, its a minimal parent path - // if the candidate has a parent which is out of scope this means it minimal itself - if (nestedParent == null || nestedPathsInScope.contains(nestedParent) == false) { - parentPaths.add(candidate); - } - } - return parentPaths; - } - - private static int step(CharacterRunAutomaton automaton, String key, int state) { - for (int i = 0; state != -1 && i < key.length(); ++i) { - state = automaton.step(state, key.charAt(i)); - } - return state; - } - 
public void setNextReader(LeafReaderContext readerContext) { for (FieldContext field : fieldContexts.values()) { field.valueFetcher.setNextReader(readerContext); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java new file mode 100644 index 0000000000000..b54e3891ce8a3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java @@ -0,0 +1,170 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.fetch.subphase; + +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.search.lookup.Source; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Class to fetch all unmapped fields from a Source that match a set of patterns + * + * Takes a set of mapped fields to ignore when matching, which should include + * any nested mappers. + */ +public class UnmappedFieldFetcher { + + /** + * Default maximum number of states in the automaton that looks up unmapped fields. + */ + private static final int AUTOMATON_MAX_DETERMINIZED_STATES = 100000; + + private final CharacterRunAutomaton unmappedFieldsFetchAutomaton; + private final List unmappedConcreteFields = new ArrayList<>(); + private final Set mappedFields; + + public static final UnmappedFieldFetcher EMPTY = new UnmappedFieldFetcher(Set.of(), List.of()); + + /** + * Builds an UnmappedFieldFetcher + * @param mappedFields a set of fields to ignore when iterating through the map + * @param unmappedFetchPatterns a set of patterns to match unmapped fields in the source against + */ + public UnmappedFieldFetcher(Set mappedFields, List unmappedFetchPatterns) { + List unmappedWildcardPatterns = new ArrayList<>(); + // We separate the "include_unmapped" field patters with wildcards from the rest in order to use less + // space in the lookup automaton + for (String pattern : unmappedFetchPatterns) { + if (Regex.isSimpleMatchPattern(pattern)) { + unmappedWildcardPatterns.add(pattern); + } else { + unmappedConcreteFields.add(pattern); + } + } + this.unmappedFieldsFetchAutomaton = buildAutomaton(unmappedWildcardPatterns); + this.mappedFields = mappedFields; + } + + private static CharacterRunAutomaton buildAutomaton(List patterns) { + if (patterns.isEmpty()) { + return null; + } + return new CharacterRunAutomaton(Regex.simpleMatchToAutomaton(patterns.toArray(String[]::new)), AUTOMATON_MAX_DETERMINIZED_STATES); + } + + /** + * Collect unmapped fields from a Source + * @param documentFields a map to receive unmapped field values as DocumentFields + * @param source the Source + */ + public void collectUnmapped(Map documentFields, Source source) { + if (this.unmappedFieldsFetchAutomaton == null && this.unmappedConcreteFields.isEmpty()) { + return; + } + collectUnmapped(documentFields, source.source(), "", 0); + } + + private void collectUnmapped(Map documentFields, Map source, String 
parentPath, int lastState) { + // lookup field patterns containing wildcards + if (this.unmappedFieldsFetchAutomaton != null) { + for (String key : source.keySet()) { + Object value = source.get(key); + String currentPath = parentPath + key; + if (this.mappedFields.contains(currentPath)) { + continue; + } + int currentState = step(this.unmappedFieldsFetchAutomaton, key, lastState); + if (currentState == -1) { + // current path doesn't match any fields pattern + continue; + } + if (value instanceof Map) { + // one step deeper into source tree + @SuppressWarnings("unchecked") + Map objectMap = (Map) value; + collectUnmapped( + documentFields, + objectMap, + currentPath + ".", + step(this.unmappedFieldsFetchAutomaton, ".", currentState) + ); + } else if (value instanceof List) { + // iterate through list values + collectUnmappedList(documentFields, (List) value, currentPath, currentState); + } else { + // we have a leaf value + if (this.unmappedFieldsFetchAutomaton.isAccept(currentState)) { + if (value != null) { + DocumentField currentEntry = documentFields.get(currentPath); + if (currentEntry == null) { + List list = new ArrayList<>(); + list.add(value); + documentFields.put(currentPath, new DocumentField(currentPath, list)); + } else { + currentEntry.getValues().add(value); + } + } + } + } + } + } + + // lookup concrete fields + if (this.unmappedConcreteFields != null) { + for (String path : unmappedConcreteFields) { + if (this.mappedFields.contains(path)) { + continue; // this is actually a mapped field + } + List values = XContentMapValues.extractRawValues(path, source); + if (values.isEmpty() == false) { + documentFields.put(path, new DocumentField(path, values)); + } + } + } + } + + private void collectUnmappedList(Map documentFields, Iterable iterable, String parentPath, int lastState) { + List list = new ArrayList<>(); + for (Object value : iterable) { + if (value instanceof Map) { + @SuppressWarnings("unchecked") + final Map objectMap = (Map) value; + collectUnmapped(documentFields, objectMap, parentPath + ".", step(this.unmappedFieldsFetchAutomaton, ".", lastState)); + } else if (value instanceof List) { + // weird case, but can happen for objects with "enabled" : "false" + collectUnmappedList(documentFields, (List) value, parentPath, lastState); + } else if (this.unmappedFieldsFetchAutomaton.isAccept(lastState) && this.mappedFields.contains(parentPath) == false) { + list.add(value); + } + } + if (list.isEmpty() == false) { + DocumentField currentEntry = documentFields.get(parentPath); + if (currentEntry == null) { + documentFields.put(parentPath, new DocumentField(parentPath, list)); + } else { + currentEntry.getValues().addAll(list); + } + } + } + + private static int step(CharacterRunAutomaton automaton, String key, int state) { + for (int i = 0; state != -1 && i < key.length(); ++i) { + state = automaton.step(state, key.charAt(i)); + } + return state; + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 6f5d07e0ebc56..d9c68e4a62de0 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -71,9 +71,6 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { */ private static final int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11; - // don't create slices with less than 50k docs - private static 
final int MINIMUM_DOCS_PER_SLICE = 50_000; - // make sure each slice has at least 10% of the documents as a way to limit memory usage and // to keep the error margin of terms aggregation low private static final double MINIMUM_DOCS_PERCENT_PER_SLICE = 0.1; @@ -84,6 +81,8 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private final QueueSizeBasedExecutor queueSizeBasedExecutor; private final LeafSlice[] leafSlices; + // don't create slices with less than this number of docs + private final int minimumDocsPerSlice; /** constructor for non-concurrent search */ public ContextIndexSearcher( @@ -93,7 +92,16 @@ public ContextIndexSearcher( QueryCachingPolicy queryCachingPolicy, boolean wrapWithExitableDirectoryReader ) throws IOException { - this(reader, similarity, queryCache, queryCachingPolicy, new MutableQueryTimeout(), wrapWithExitableDirectoryReader, null); + this( + reader, + similarity, + queryCache, + queryCachingPolicy, + new MutableQueryTimeout(), + Integer.MAX_VALUE, + wrapWithExitableDirectoryReader, + null + ); } /** constructor for concurrent search */ @@ -102,10 +110,20 @@ public ContextIndexSearcher( Similarity similarity, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, + int minimumDocsPerSlice, boolean wrapWithExitableDirectoryReader, ThreadPoolExecutor executor ) throws IOException { - this(reader, similarity, queryCache, queryCachingPolicy, new MutableQueryTimeout(), wrapWithExitableDirectoryReader, executor); + this( + reader, + similarity, + queryCache, + queryCachingPolicy, + new MutableQueryTimeout(), + minimumDocsPerSlice, + wrapWithExitableDirectoryReader, + executor + ); } private ContextIndexSearcher( @@ -114,6 +132,7 @@ private ContextIndexSearcher( QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, MutableQueryTimeout cancellable, + int minimumDocsPerSlice, boolean wrapWithExitableDirectoryReader, ThreadPoolExecutor executor ) throws IOException { @@ -124,9 +143,15 @@ private ContextIndexSearcher( setQueryCachingPolicy(queryCachingPolicy); this.cancellable = cancellable; this.queueSizeBasedExecutor = executor != null ? new QueueSizeBasedExecutor(executor) : null; + this.minimumDocsPerSlice = minimumDocsPerSlice; this.leafSlices = executor == null ? 
null : slices(leafContexts); } + // package private for testing + int getMinimumDocsPerSlice() { + return minimumDocsPerSlice; + } + public void setProfiler(QueryProfiler profiler) { this.profiler = profiler; } @@ -203,7 +228,7 @@ public Weight createWeight(Query query, ScoreMode scoreMode, float boost) throws @Override protected LeafSlice[] slices(List leaves) { - return computeSlices(leaves, queueSizeBasedExecutor.threadPoolExecutor.getMaximumPoolSize(), MINIMUM_DOCS_PER_SLICE); + return computeSlices(leaves, queueSizeBasedExecutor.threadPoolExecutor.getMaximumPoolSize(), minimumDocsPerSlice); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java index f20516f96a075..07b80ee2f41e8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedLookupTests.java @@ -81,4 +81,41 @@ public void testAllParentFilters() { assertThat(lookup.getNestedParentFilters().keySet(), containsInAnyOrder("a", "a.b", "a.b.c.d")); } + public void testGetNestedParent() { + List mappers = List.of( + buildMapper("a.b"), + buildMapper("a.d"), + buildMapper("a.b.c.d.e"), + buildMapper("a.b.d"), + buildMapper("a"), + buildMapper("a.b.c.d") + ); + + NestedLookup lookup = NestedLookup.build(mappers); + assertNull(lookup.getNestedParent("foo")); + assertEquals("a.b", lookup.getNestedParent("a.b.foo")); + assertEquals("a", lookup.getNestedParent("a.foo.bar")); + assertEquals("a.b.d", lookup.getNestedParent("a.b.d.foo")); + assertEquals("a.b", lookup.getNestedParent("a.b.c.foo")); + assertNull(lookup.getNestedParent("aa.b")); + } + + public void testGetImmediateChildren() { + List mappers = List.of( + buildMapper("a.b"), + buildMapper("a.d"), + buildMapper("a.b.c.d.e"), + buildMapper("a.b.d"), + buildMapper("a"), + buildMapper("a.b.c.d") + ); + + NestedLookup lookup = NestedLookup.build(mappers); + assertEquals(List.of("a.b.c.d", "a.b.d"), lookup.getImmediateChildMappers("a.b")); + assertEquals(List.of(), lookup.getImmediateChildMappers("a.b.d")); + assertEquals(List.of("a"), lookup.getImmediateChildMappers("")); + assertEquals(List.of(), lookup.getImmediateChildMappers("aa")); + assertEquals(List.of(), lookup.getImmediateChildMappers("a.c")); + } + } diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java index fe4c4687768d9..ead3d02529631 100644 --- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java @@ -142,6 +142,7 @@ protected Engine.Searcher acquireSearcherInternal(String source) { target, null, timeout, + randomIntBetween(1, Integer.MAX_VALUE), null, false ); @@ -173,7 +174,16 @@ protected Engine.Searcher acquireSearcherInternal(String source) { shardSearchRequest, randomNonNegativeLong() ); - DefaultSearchContext context1 = new DefaultSearchContext(readerContext, shardSearchRequest, target, null, timeout, null, false); + DefaultSearchContext context1 = new DefaultSearchContext( + readerContext, + shardSearchRequest, + target, + null, + timeout, + randomIntBetween(1, Integer.MAX_VALUE), + null, + false + ); context1.from(300); exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess()); assertThat( @@ -237,7 +247,16 @@ public ScrollContext scrollContext() { } }; // 
rescore is null but sliceBuilder is not null - DefaultSearchContext context2 = new DefaultSearchContext(readerContext, shardSearchRequest, target, null, timeout, null, false); + DefaultSearchContext context2 = new DefaultSearchContext( + readerContext, + shardSearchRequest, + target, + null, + timeout, + randomIntBetween(1, Integer.MAX_VALUE), + null, + false + ); SliceBuilder sliceBuilder = mock(SliceBuilder.class); int numSlices = maxSlicesPerScroll + randomIntBetween(1, 100); @@ -263,7 +282,16 @@ public ScrollContext scrollContext() { when(shardSearchRequest.getAliasFilter()).thenReturn(AliasFilter.EMPTY); when(shardSearchRequest.indexBoost()).thenReturn(AbstractQueryBuilder.DEFAULT_BOOST); - DefaultSearchContext context3 = new DefaultSearchContext(readerContext, shardSearchRequest, target, null, timeout, null, false); + DefaultSearchContext context3 = new DefaultSearchContext( + readerContext, + shardSearchRequest, + target, + null, + timeout, + randomIntBetween(1, Integer.MAX_VALUE), + null, + false + ); ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery(); context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(); assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query())); @@ -279,7 +307,16 @@ public ScrollContext scrollContext() { randomNonNegativeLong(), false ); - DefaultSearchContext context4 = new DefaultSearchContext(readerContext, shardSearchRequest, target, null, timeout, null, false); + DefaultSearchContext context4 = new DefaultSearchContext( + readerContext, + shardSearchRequest, + target, + null, + timeout, + randomIntBetween(1, Integer.MAX_VALUE), + null, + false + ); context4.sliceBuilder(new SliceBuilder(1, 2)).parsedQuery(parsedQuery).preProcess(); Query query1 = context4.query(); context4.sliceBuilder(new SliceBuilder(0, 2)).parsedQuery(parsedQuery).preProcess(); @@ -336,7 +373,16 @@ protected Engine.Searcher acquireSearcherInternal(String source) { randomNonNegativeLong(), false ); - DefaultSearchContext context = new DefaultSearchContext(readerContext, shardSearchRequest, target, null, timeout, null, false); + DefaultSearchContext context = new DefaultSearchContext( + readerContext, + shardSearchRequest, + target, + null, + timeout, + randomIntBetween(1, Integer.MAX_VALUE), + null, + false + ); assertThat(context.searcher().hasCancellations(), is(false)); context.searcher().addQueryCancellation(() -> {}); diff --git a/server/src/test/java/org/elasticsearch/search/NestedUtilsTests.java b/server/src/test/java/org/elasticsearch/search/NestedUtilsTests.java new file mode 100644 index 0000000000000..d06afb5013955 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/NestedUtilsTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +public class NestedUtilsTests extends ESTestCase { + + public void testPartitionByChild() { + List children = List.of("child1", "child2", "stepchild"); + List inputs = List.of("a", "b", "child1.grandchild", "child1.grandchild2", "child11", "child2.grandchild", "frog"); + Map> partitioned = NestedUtils.partitionByChildren("", children, inputs, s -> s); + assertEquals( + Map.of( + "", + List.of("a", "b", "child11", "frog"), + "child1", + List.of("child1.grandchild", "child1.grandchild2"), + "child2", + List.of("child2.grandchild"), + "stepchild", + List.of() + ), + partitioned + ); + } + + public void testScopedPartitionByChild() { + List children = List.of("a.child1", "a.child2", "a.stepchild"); + List inputs = List.of( + "a.a", + "a.b", + "a.child1.grandchild", + "a.child1.grandchild2", + "a.child11", + "a.child2.grandchild", + "a.frog" + ); + Map> partitioned = NestedUtils.partitionByChildren("a", children, inputs, s -> s); + assertEquals( + Map.of( + "a", + List.of("a.a", "a.b", "a.child11", "a.frog"), + "a.child1", + List.of("a.child1.grandchild", "a.child1.grandchild2"), + "a.child2", + List.of("a.child2.grandchild"), + "a.stepchild", + List.of() + ), + partitioned + ); + } + + public void testScopedPartitionWithMultifields() { + List children = List.of("user.address"); + List inputs = List.of("user.address.city", "user.address.zip", "user.first", "user.last", "user.last.keyword"); + Map> partitioned = NestedUtils.partitionByChildren("user", children, inputs, s -> s); + assertEquals( + Map.of( + "user", + List.of("user.first", "user.last", "user.last.keyword"), + "user.address", + List.of("user.address.city", "user.address.zip") + ), + partitioned + ); + } + + public void testEmptyCases() { + // No children, everything gets mapped under the scope + assertEquals(Map.of("scope", List.of("foo")), NestedUtils.partitionByChildren("scope", List.of(), List.of("foo"), s -> s)); + // No inputs, we get an empty map under the scope + assertEquals( + Map.of("scope", List.of(), "scope.child", List.of()), + NestedUtils.partitionByChildren("scope", List.of("scope.child"), List.of(), s -> s) + ); + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java index fcf6da9c22f95..a93c7ef4abf1c 100644 --- a/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/dfs/DfsPhaseTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; @@ -74,15 +73,10 @@ public void testSingleKnnSearch() throws IOException { IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), + 1, randomBoolean(), this.threadPoolExecutor - ) { - @Override - protected LeafSlice[] slices(List leaves) { - // get a thread per segment - return slices(leaves, 1, 1); - } - }; + ); Query query = new KnnFloatVectorQuery("float_vector", new float[] { 0, 0, 0 }, numDocs, null); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java 
b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java index 0de84728e5f7f..e2ecd6e38a23c 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.ClusterSettings; @@ -807,6 +808,77 @@ public void testNestedFields() throws IOException { assertEquals("value4b", eval("inner_nested.0.f4.0", obj1)); } + public void testDoublyNestedWithMultifields() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { "properties" : { + "user" : { + "type" : "nested", + "properties" : { + "first" : { "type" : "keyword" }, + "last" : { "type" : "text", "fields" : { "keyword" : { "type" : "keyword" } } }, + "address" : { + "type" : "nested", + "properties" : { + "city" : { "type" : "keyword" }, + "zip" : { "type" : "keyword" } + } + } + } + } + }}} + """); + + String source = """ + { "user" : [ { "first" : "John", + "last" : "Smith", + "address" : [ { "city" : "Berlin", "zip" : "1111" }, { "city" : "Ottawa", "zip" : "1111" } ] } ] } + """; + + var results = fetchFields(mapperService, source, fieldAndFormatList("*", null, false)); + DocumentField user = results.get("user"); + Map fields = (Map) user.getValues().get(0); + assertThat(fields.keySet(), hasSize(4)); + } + + public void testNestedUnmappedFields() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { "properties" : { + "id" : { "type" : "keyword" }, + "user_account" : { + "type" : "nested", + "properties" : { + "details" : { + "type" : "object", + "enabled" : false + } + } + }, + "user" : { + "type" : "nested", + "properties" : { + "first" : { "type" : "keyword" }, + "address" : { + "type" : "object", + "enabled" : false + } + } + } + }}} + """); + String source = """ + { "id" : "1", "user" : { "first" : "John", "address" : { "city" : "Toronto" } }, "user_account" : { "details" : { "id" : 2 } } } + """; + + var results = fetchFields(mapperService, source, fieldAndFormatList("*", null, true)); + assertNotNull(results.get("user_account")); + assertEquals("2", eval(new String[] { "details.id", "0" }, results.get("user_account").getValues().get(0)).toString()); + + results = fetchFields(mapperService, source, fieldAndFormatList("user.address.*", null, true)); + assertNotNull(results.get("user")); + assertNull(eval("first", results.get("user").getValues().get(0))); + assertEquals("Toronto", eval(new String[] { "address.city", "0" }, results.get("user").getValues().get(0))); + } + @SuppressWarnings("unchecked") public void testFlattenedField() throws IOException { XContentBuilder mapping = mapping(b -> b.startObject("flat").field("type", "flattened").endObject()); @@ -1209,6 +1281,12 @@ private static Map fetchFields(MapperService mapperServic return fieldFetcher.fetch(s, -1); } + private static Map fetchFields(MapperService mapperService, String source, List fields) + throws IOException { + FieldFetcher fieldFetcher = FieldFetcher.create(newSearchExecutionContext(mapperService), fields); + return fieldFetcher.fetch(Source.fromBytes(new BytesArray(source), XContentType.JSON), -1); 
+ } + public MapperService createMapperService() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index e37ac97c912f7..28cd9f15a9b48 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -247,14 +247,12 @@ public Void reduce(Collection collectors) { IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), + 1, randomBoolean(), executor ) { @Override protected LeafSlice[] slices(List<LeafReaderContext> leaves) { - if (leaves.size() == 1) { - return super.slices(leaves); - } return slices(leaves, 1, 1); } }; diff --git a/test/framework/src/main/java/org/elasticsearch/search/ConcurrentSearchTestPlugin.java b/test/framework/src/main/java/org/elasticsearch/search/ConcurrentSearchTestPlugin.java new file mode 100644 index 0000000000000..9d7f3e8210ba6 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/search/ConcurrentSearchTestPlugin.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.plugins.Plugin; + +import java.util.List; + +/** + * This plugin is used to register the {@link SearchService#MINIMUM_DOCS_PER_SLICE} setting. + * This setting forces the {@link SearchService} to create many slices even when very few documents + * are available, something we don't really want to happen in real usage. 
+ */ +public class ConcurrentSearchTestPlugin extends Plugin { + @Override + public List<Setting<?>> getSettings() { + return List.of(SearchService.MINIMUM_DOCS_PER_SLICE); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 0ba2a7661d328..2cd2f69d8fa85 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -929,15 +929,10 @@ protected IndexSearcher newIndexSearcher(DirectoryReader indexReader) throws IOE IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), + 1, // forces multiple slices randomBoolean(), this.threadPoolExecutor - ) { - @Override - protected LeafSlice[] slices(List<LeafReaderContext> leaves) { - // get a thread per segment - return slices(leaves, 1, 1); - } - }; + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 65d0b670e97f3..217fe712cb419 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -123,6 +123,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.script.MockScriptService; +import org.elasticsearch.search.ConcurrentSearchTestPlugin; import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchService; @@ -2045,6 +2046,10 @@ private NodeConfigurationSource getNodeConfigSource() { if (addMockTransportService()) { initialNodeSettings.put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()); } + boolean eagerConcurrentSearch = eagerConcurrentSearch(); + if (eagerConcurrentSearch) { + initialNodeSettings.put(SearchService.MINIMUM_DOCS_PER_SLICE.getKey(), 1); + } return new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { @@ -2061,6 +2066,11 @@ public Path nodeConfigPath(int nodeOrdinal) { @Override public Collection<Class<? extends Plugin>> nodePlugins() { + if (eagerConcurrentSearch) { + List<Class<? extends Plugin>> plugins = new ArrayList<>(ESIntegTestCase.this.nodePlugins()); + plugins.add(ConcurrentSearchTestPlugin.class); + return plugins; + } return ESIntegTestCase.this.nodePlugins(); } }; @@ -2074,6 +2084,15 @@ protected boolean addMockTransportService() { return true; } + /** + * Whether we'd like to increase the likelihood of leveraging inter-segment search concurrency, by creating multiple slices + * with a low amount of documents in them, which would not be allowed in production. + * Default is true, can be disabled if it causes problems in specific tests. 
+ */ + protected boolean eagerConcurrentSearch() { + return true; + } + /** Returns {@code true} iff this test cluster should use a dummy http transport */ protected boolean addMockHttpTransport() { return true; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index d3669086d2d13..2b13c9b67d345 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -45,6 +45,7 @@ import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptService; +import org.elasticsearch.search.ConcurrentSearchTestPlugin; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.rest.ESRestTestCase; @@ -226,7 +227,10 @@ protected List filteredWarnings() { private Node newNode() { final Path tempDir = createTempDir(); final String nodeName = nodeSettings().get(Node.NODE_NAME_SETTING.getKey(), "node_s_0"); - + boolean eagerConcurrentSearch = eagerConcurrentSearch(); + Settings concurrentSetting = eagerConcurrentSearch + ? Settings.builder().put(SearchService.MINIMUM_DOCS_PER_SLICE.getKey(), 1).build() + : Settings.EMPTY; Settings settings = Settings.builder() .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", random().nextLong())) .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), false) @@ -251,6 +255,7 @@ private Node newNode() { .putList(DISCOVERY_SEED_HOSTS_SETTING.getKey()) // empty list disables a port scan for other nodes .putList(INITIAL_MASTER_NODES_SETTING.getKey(), nodeName) .put(nodeSettings()) // allow test cases to provide their own settings or override these + .put(concurrentSetting) .build(); Collection> plugins = new ArrayList<>(getPlugins()); @@ -260,6 +265,9 @@ private Node newNode() { if (addMockHttpTransport()) { plugins.add(MockHttpTransport.TestPlugin.class); } + if (eagerConcurrentSearch) { + plugins.add(ConcurrentSearchTestPlugin.class); + } plugins.add(MockScriptService.TestPlugin.class); Node node = new MockNode(settings, plugins, forbidPrivateIndexSettings()); try { @@ -436,4 +444,12 @@ protected void ensureNoInitializingShards() { assertFalse("timed out waiting for shards to initialize", actionGet.isTimedOut()); } + /** + * Whether we'd like to increase the likelihood of leveraging inter-segment search concurrency, by creating multiple slices + * with a low amount of documents in them, which would not be allowed in production. + * Default is true, can be disabled if it causes problems in specific tests. + */ + protected boolean eagerConcurrentSearch() { + return true; + } } diff --git a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java new file mode 100644 index 0000000000000..b1be7bb3f1a4f --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchSingleNodeTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.internal; + +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.io.IOException; + +public class ConcurrentSearchSingleNodeTests extends ESSingleNodeTestCase { + + private final boolean eagerConcurrentSearch = randomBoolean(); + + public void testEagerConcurrentSearch() throws IOException { + client().admin().indices().prepareCreate("index").get(); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService indexService = indicesService.iterator().next(); + IndexShard shard = indexService.getShard(0); + SearchService searchService = getInstanceFromNode(SearchService.class); + ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); + try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { + ContextIndexSearcher searcher = searchContext.searcher(); + if (eagerConcurrentSearch) { + assertEquals(1, searcher.getMinimumDocsPerSlice()); + } else { + assertEquals(50_000, searcher.getMinimumDocsPerSlice()); + } + } + } + + @Override + protected boolean eagerConcurrentSearch() { + return eagerConcurrentSearch; + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java new file mode 100644 index 0000000000000..3b27aa3b51138 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/search/internal/ConcurrentSearchTestPluginTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.internal; + +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 1) +public class ConcurrentSearchTestPluginTests extends ESIntegTestCase { + + private final boolean eagerConcurrentSearch = randomBoolean(); + + public void testEagerConcurrentSearch() throws IOException { + client().admin().indices().prepareCreate("index").get(); + IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class); + IndexService indexService = indicesService.iterator().next(); + IndexShard shard = indexService.getShard(0); + SearchService searchService = internalCluster().getDataNodeInstance(SearchService.class); + ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shard.shardId(), 0L, AliasFilter.EMPTY); + try (SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, TimeValue.MINUS_ONE)) { + ContextIndexSearcher searcher = searchContext.searcher(); + if (eagerConcurrentSearch) { + assertEquals(1, searcher.getMinimumDocsPerSlice()); + } else { + assertEquals(50_000, searcher.getMinimumDocsPerSlice()); + } + } + } + + @Override + protected boolean eagerConcurrentSearch() { + return eagerConcurrentSearch; + } +} diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 1227549c74a66..2183e5659ebfa 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -38,7 +38,7 @@ tasks.named("yamlRestTest").configure { if (BuildParams.isSnapshotBuild()) { systemProperty 'tests.rest.blacklist', '*/get-builtin-privileges/*' } else { - systemProperty 'tests.rest.blacklist', ['*/create-cross-cluster-api-key/*', '*/update-cross-cluster-api-key/*'] + systemProperty 'tests.rest.blacklist', ['*/create-cross-cluster-api-key/*', '*/update-cross-cluster-api-key/*'].join(',') } } diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 038d5714d2273..2bc1433ef615c 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -391,7 +391,7 @@ private long getRegionSize(long fileLength, int region) { return effectiveRegionSize; } - public CacheFileRegion get(KeyType cacheKey, long fileLength, int region) { + public Entry get(KeyType cacheKey, long fileLength, int region) { final RegionKey regionKey = new RegionKey<>(cacheKey, region); final long now = threadPool.relativeTimeInMillis(); // try to just get from the map on the fast-path to save instantiating the capturing lambda needed on the slow path if we did not @@ -416,10 +416,10 @@ public CacheFileRegion get(KeyType cacheKey, long fileLength, int region) { maybePromote(now, entry); } - return entry.chunk; + return entry; } - private CacheFileRegion initChunk(Entry entry) { + private Entry initChunk(Entry entry) { assert Thread.holdsLock(entry.chunk); RegionKey regionKey = entry.chunk.regionKey; if (keyMapping.get(regionKey) != entry) { @@ -448,7 +448,7 @@ private CacheFileRegion 
initChunk(Entry entry) { } } - return entry.chunk; + return entry; } private void maybePromote(long now, Entry entry) { @@ -783,10 +783,8 @@ private static void throwAlreadyEvicted() { boolean tryRead(ByteBuffer buf, long offset) throws IOException { int startingPos = buf.position(); - try (SharedBytes.IO fileChannel = sharedBytes.getFileChannel(sharedBytesPos)) { - fileChannel.read(buf, physicalStartOffset() + getRegionRelativePosition(offset)); - } - if (evicted.get() || hasReferences() == false) { + sharedBytes.getFileChannel(sharedBytesPos).read(buf, physicalStartOffset() + getRegionRelativePosition(offset)); + if (isEvicted() || hasReferences() == false) { buf.position(startingPos); return false; } @@ -801,24 +799,19 @@ void populateAndRead( final ActionListener listener ) { assert rangeToRead.length() > 0; - final Releasable[] resources = new Releasable[2]; + Releasable resource = null; try { - ensureOpen(); incRef(); - resources[1] = Releasables.releaseOnce(this::decRef); - + resource = Releasables.releaseOnce(this::decRef); ensureOpen(); - final SharedBytes.IO fileChannel = sharedBytes.getFileChannel(sharedBytesPos); - resources[0] = Releasables.releaseOnce(fileChannel); - final List gaps = tracker.waitForRange( rangeToWrite, rangeToRead, - ActionListener.runBefore(listener, () -> Releasables.close(resources)).delegateFailureAndWrap((l, success) -> { + ActionListener.runBefore(listener, resource::close).delegateFailureAndWrap((l, success) -> { final long physicalStartOffset = physicalStartOffset(); assert regionOwners.get(sharedBytesPos) == this; final int read = reader.onRangeAvailable( - fileChannel, + sharedBytes.getFileChannel(sharedBytesPos), physicalStartOffset + rangeToRead.start(), rangeToRead.start(), rangeToRead.length() @@ -837,14 +830,15 @@ void populateAndRead( ); if (gaps.isEmpty() == false) { - fillGaps(writer, fileChannel, gaps); + fillGaps(writer, gaps); } } catch (Exception e) { - releaseAndFail(listener, Releasables.wrap(resources), e); + releaseAndFail(listener, resource, e); } } - private void fillGaps(RangeMissingHandler writer, SharedBytes.IO fileChannel, List gaps) { + private void fillGaps(RangeMissingHandler writer, List gaps) { + SharedBytes.IO fileChannel = sharedBytes.getFileChannel(sharedBytesPos); for (SparseFileTracker.Gap gap : gaps) { ioExecutor.execute(new AbstractRunnable() { @@ -899,11 +893,17 @@ public class CacheFile { private final KeyType cacheKey; private final long length; + private Entry lastAccessedRegion; + private CacheFile(KeyType cacheKey, long length) { this.cacheKey = cacheKey; this.length = length; } + public CacheFile copy() { + return new CacheFile(cacheKey, length); + } + public long getLength() { return length; } @@ -919,11 +919,23 @@ public boolean tryRead(ByteBuffer buf, long offset) throws IOException { if (startRegion != endRegion) { return false; } - final CacheFileRegion fileRegion = get(cacheKey, length, startRegion); - if (fileRegion.tracker.checkAvailable(getRegionRelativePosition(end)) == false) { + var fileRegion = lastAccessedRegion; + if (fileRegion != null && fileRegion.chunk.regionKey.region == startRegion) { + // existing item, check if we need to promote item + long now = threadPool.relativeTimeInMillis(); + if (now - fileRegion.lastAccessed >= minTimeDelta) { + maybePromote(now, fileRegion); + } + } else { + fileRegion = get(cacheKey, length, startRegion); + } + final var region = fileRegion.chunk; + if (region.tracker.checkAvailable(end - getRegionStart(startRegion)) == false) { return false; } - return 
fileRegion.tryRead(buf, offset); + boolean res = region.tryRead(buf, offset); + lastAccessedRegion = res ? fileRegion : null; + return res; } public int populateAndRead( @@ -952,7 +964,7 @@ private int readSingleRegion( int region ) throws InterruptedException, ExecutionException { final PlainActionFuture readFuture = PlainActionFuture.newFuture(); - final CacheFileRegion fileRegion = get(cacheKey, length, region); + final CacheFileRegion fileRegion = get(cacheKey, length, region).chunk; final long regionStart = getRegionStart(region); fileRegion.populateAndRead( mapSubRangeToRegion(rangeToWrite, region), @@ -981,7 +993,7 @@ private int readMultiRegions( // nothing to read, skip continue; } - final CacheFileRegion fileRegion = get(cacheKey, length, region); + final CacheFileRegion fileRegion = get(cacheKey, length, region).chunk; final long regionStart = getRegionStart(region); fileRegion.populateAndRead( mapSubRangeToRegion(rangeToWrite, region), diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java index f71517448b2ac..9c056f72c1fb7 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBytes.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; @@ -252,9 +251,7 @@ protected void closeInternal() { public IO getFileChannel(int sharedBytesPos) { assert fileChannel != null; - var res = ios[sharedBytesPos]; - incRef(); - return res; + return ios[sharedBytesPos]; } long getPhysicalOffset(long chunkPosition) { @@ -263,7 +260,7 @@ long getPhysicalOffset(long chunkPosition) { return physicalOffset; } - public final class IO implements Releasable { + public final class IO { private final long pageStart; @@ -315,11 +312,6 @@ private void checkOffsets(long position, long length) { throw new IllegalArgumentException("bad access"); } } - - @Override - public void close() { - decRef(); - } } public static ByteSizeValue pageAligned(ByteSizeValue val) { diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java index 3dc137a8fe29f..701c5eab56fd9 100644 --- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java +++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBlobCacheServiceTests.java @@ -63,13 +63,13 @@ public void testBasicEviction() throws IOException { ) { final var cacheKey = generateCacheKey(); assertEquals(5, cacheService.freeRegionCount()); - final var region0 = cacheService.get(cacheKey, size(250), 0); + final var region0 = cacheService.get(cacheKey, size(250), 0).chunk; assertEquals(size(100), region0.tracker.getLength()); assertEquals(4, cacheService.freeRegionCount()); - final var region1 = cacheService.get(cacheKey, size(250), 1); + final var region1 = cacheService.get(cacheKey, size(250), 1).chunk; assertEquals(size(100), region1.tracker.getLength()); assertEquals(3, 
cacheService.freeRegionCount()); - final var region2 = cacheService.get(cacheKey, size(250), 2); + final var region2 = cacheService.get(cacheKey, size(250), 2).chunk; assertEquals(size(50), region2.tracker.getLength()); assertEquals(2, cacheService.freeRegionCount()); @@ -122,17 +122,17 @@ public void testAutoEviction() throws IOException { ) { final var cacheKey = generateCacheKey(); assertEquals(2, cacheService.freeRegionCount()); - final var region0 = cacheService.get(cacheKey, size(250), 0); + final var region0 = cacheService.get(cacheKey, size(250), 0).chunk; assertEquals(size(100), region0.tracker.getLength()); assertEquals(1, cacheService.freeRegionCount()); - final var region1 = cacheService.get(cacheKey, size(250), 1); + final var region1 = cacheService.get(cacheKey, size(250), 1).chunk; assertEquals(size(100), region1.tracker.getLength()); assertEquals(0, cacheService.freeRegionCount()); assertFalse(region0.isEvicted()); assertFalse(region1.isEvicted()); // acquire region 2, which should evict region 0 (oldest) - final var region2 = cacheService.get(cacheKey, size(250), 2); + final var region2 = cacheService.get(cacheKey, size(250), 2).chunk; assertEquals(size(50), region2.tracker.getLength()); assertEquals(0, cacheService.freeRegionCount()); assertTrue(region0.isEvicted()); @@ -161,9 +161,9 @@ public void testForceEviction() throws IOException { final var cacheKey1 = generateCacheKey(); final var cacheKey2 = generateCacheKey(); assertEquals(5, cacheService.freeRegionCount()); - final var region0 = cacheService.get(cacheKey1, size(250), 0); + final var region0 = cacheService.get(cacheKey1, size(250), 0).chunk; assertEquals(4, cacheService.freeRegionCount()); - final var region1 = cacheService.get(cacheKey2, size(250), 1); + final var region1 = cacheService.get(cacheKey2, size(250), 1).chunk; assertEquals(3, cacheService.freeRegionCount()); assertFalse(region0.isEvicted()); assertFalse(region1.isEvicted()); @@ -189,9 +189,9 @@ public void testForceEvictResponse() throws IOException { final var cacheKey1 = generateCacheKey(); final var cacheKey2 = generateCacheKey(); assertEquals(5, cacheService.freeRegionCount()); - final var region0 = cacheService.get(cacheKey1, size(250), 0); + final var region0 = cacheService.get(cacheKey1, size(250), 0).chunk; assertEquals(4, cacheService.freeRegionCount()); - final var region1 = cacheService.get(cacheKey2, size(250), 1); + final var region1 = cacheService.get(cacheKey2, size(250), 1).chunk; assertEquals(3, cacheService.freeRegionCount()); assertFalse(region0.isEvicted()); assertFalse(region1.isEvicted()); @@ -216,9 +216,9 @@ public void testDecay() throws IOException { final var cacheKey1 = generateCacheKey(); final var cacheKey2 = generateCacheKey(); assertEquals(5, cacheService.freeRegionCount()); - final var region0 = cacheService.get(cacheKey1, size(250), 0); + final var region0 = cacheService.get(cacheKey1, size(250), 0).chunk; assertEquals(4, cacheService.freeRegionCount()); - final var region1 = cacheService.get(cacheKey2, size(250), 1); + final var region1 = cacheService.get(cacheKey2, size(250), 1).chunk; assertEquals(3, cacheService.freeRegionCount()); assertEquals(0, cacheService.getFreq(region0)); @@ -227,7 +227,7 @@ public void testDecay() throws IOException { taskQueue.advanceTime(); taskQueue.runAllRunnableTasks(); - final var region0Again = cacheService.get(cacheKey1, size(250), 0); + final var region0Again = cacheService.get(cacheKey1, size(250), 0).chunk; assertSame(region0Again, region0); assertEquals(1, 
cacheService.getFreq(region0)); assertEquals(0, cacheService.getFreq(region1)); @@ -294,7 +294,7 @@ public void testGetMultiThreaded() throws IOException { cacheKeys[i], fileLength, regions[i] - ); + ).chunk; if (cacheFileRegion.tryIncRef()) { if (yield[i] == 0) { Thread.yield(); diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBytesTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBytesTests.java index 81899f7891e86..24625a91d0975 100644 --- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBytesTests.java +++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/shared/SharedBytesTests.java @@ -37,17 +37,7 @@ public void testReleasesFileCorrectly() throws Exception { ); final var sharedBytesPath = nodeEnv.nodeDataPaths()[0].resolve("shared_snapshot_cache"); assertTrue(Files.exists(sharedBytesPath)); - SharedBytes.IO fileChannel = sharedBytes.getFileChannel(randomInt(regions - 1)); - assertTrue(Files.exists(sharedBytesPath)); - if (randomBoolean()) { - fileChannel.close(); - assertTrue(Files.exists(sharedBytesPath)); - sharedBytes.decRef(); - } else { - sharedBytes.decRef(); - assertTrue(Files.exists(sharedBytesPath)); - fileChannel.close(); - } + sharedBytes.decRef(); assertFalse(Files.exists(sharedBytesPath)); } } diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index d8c41ccab381b..f173e934df266 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -41,7 +41,6 @@ dependencies { api "org.apache.httpcomponents:httpcore:${versions.httpcore}" api "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" api "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}" - api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" @@ -49,6 +48,8 @@ dependencies { // security deps api 'com.unboundid:unboundid-ldapsdk:6.0.3' + implementation project(":x-pack:plugin:core:template-resources") + testImplementation "org.elasticsearch:mocksocket:${versions.mocksocket}" testImplementation "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}" // this might suffer from https://github.com/elastic/elasticsearch/issues/93714 testImplementation "org.slf4j:slf4j-api:${versions.slf4j}" diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index d47c90b09e125..fe2146a4e62fb 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -21,6 +21,7 @@ requires org.apache.lucene.join; requires unboundid.ldapsdk; requires org.elasticsearch.tdigest; + requires org.elasticsearch.xcore.templates; exports org.elasticsearch.index.engine.frozen; exports org.elasticsearch.license; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java index 484369303a83a..295132dc57f0d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java @@ -14,19 +14,15 @@ import org.elasticsearch.cluster.metadata.ItemUsage; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import 
org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.NotXContentException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.Streams; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.template.resources.TemplateResources; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; import java.util.List; import java.util.Map; import java.util.regex.Pattern; @@ -49,13 +45,13 @@ public static LifecyclePolicy loadPolicy( NamedXContentRegistry xContentRegistry ) { try { - BytesReference source = load(resource); + String source = TemplateResources.load(resource); source = replaceVariables(source, variables); validate(source); try ( XContentParser parser = XContentType.JSON.xContent() - .createParser(XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry), source.utf8ToString()) + .createParser(XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry), source) ) { LifecyclePolicy policy = LifecyclePolicy.parse(parser, name); policy.validate(); @@ -66,24 +62,11 @@ public static LifecyclePolicy loadPolicy( } } - /** - * Loads a resource from the classpath and returns it as a {@link BytesReference} - */ - private static BytesReference load(String name) throws IOException { - try (InputStream is = LifecyclePolicyUtils.class.getResourceAsStream(name)) { - try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - Streams.copy(is, out); - return new BytesArray(out.toByteArray()); - } - } - } - - private static BytesReference replaceVariables(BytesReference input, Map variables) { - String template = input.utf8ToString(); + private static String replaceVariables(String template, Map variables) { for (Map.Entry variable : variables.entrySet()) { template = replaceVariable(template, variable.getKey(), variable.getValue()); } - return new BytesArray(template); + return template; } /** @@ -96,13 +79,13 @@ public static String replaceVariable(String input, String variable, String value /** * Parses and validates that the source is not empty. 
*/ - private static void validate(BytesReference source) { + private static void validate(String source) { if (source == null) { throw new ElasticsearchParseException("policy must not be null"); } try { - XContentHelper.convertToMap(source, false, XContentType.JSON).v2(); + XContentHelper.convertToMap(new BytesArray(source), false, XContentType.JSON).v2(); } catch (NotXContentException e) { throw new ElasticsearchParseException("policy must not be empty"); } catch (Exception e) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigIndex.java index 8e9382a65983e..2499734672053 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigIndex.java @@ -29,11 +29,7 @@ public static String indexName() { } public static String mapping() { - return TemplateUtils.loadTemplate( - "/org/elasticsearch/xpack/core/ml/config_index_mappings.json", - Version.CURRENT.toString(), - MAPPINGS_VERSION_VARIABLE - ); + return TemplateUtils.loadTemplate("/ml/config_index_mappings.json", Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE); } public static Settings settings() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetaIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetaIndex.java index d09db7ece0363..a72a051414677 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetaIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetaIndex.java @@ -27,11 +27,7 @@ public static String indexName() { } public static String mapping() { - return TemplateUtils.loadTemplate( - "/org/elasticsearch/xpack/core/ml/meta_index_mappings.json", - Version.CURRENT.toString(), - MAPPINGS_VERSION_VARIABLE - ); + return TemplateUtils.loadTemplate("/ml/meta_index_mappings.json", Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE); } public static Settings settings() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlStatsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlStatsIndex.java index 9b4696c2486e7..76234954b4107 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlStatsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlStatsIndex.java @@ -36,11 +36,7 @@ public static String wrappedMapping() { } public static String mapping() { - return TemplateUtils.loadTemplate( - "/org/elasticsearch/xpack/core/ml/stats_index_mappings.json", - Version.CURRENT.toString(), - MAPPINGS_VERSION_VARIABLE - ); + return TemplateUtils.loadTemplate("/ml/stats_index_mappings.json", Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE); } public static String indexPattern() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java index cf24a7677383c..c509e52675e38 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java @@ -208,10 +208,6 @@ public static void createAnnotationsIndexIfNecessary( } public static String annotationsMapping() { - return 
TemplateUtils.loadTemplate( - "/org/elasticsearch/xpack/core/ml/annotations_index_mappings.json", - Version.CURRENT.toString(), - MAPPINGS_VERSION_VARIABLE - ); + return TemplateUtils.loadTemplate("/ml/annotations_index_mappings.json", Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java index 11607c8207ac2..aa8fa7c88e719 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/persistence/InferenceIndexConstants.java @@ -46,11 +46,7 @@ public final class InferenceIndexConstants { private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; public static String mapping() { - return TemplateUtils.loadTemplate( - "/org/elasticsearch/xpack/core/ml/inference_index_mappings.json", - Version.CURRENT.toString(), - MAPPINGS_VERSION_VARIABLE - ); + return TemplateUtils.loadTemplate("/ml/inference_index_mappings.json", Version.CURRENT.toString(), MAPPINGS_VERSION_VARIABLE); } public static String nativeDefinitionStore() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index 0e9349361758e..0dbed16d3b821 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -28,7 +28,7 @@ public final class AnomalyDetectorsIndex { private static final String RESULTS_MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; - private static final String RESOURCE_PATH = "/org/elasticsearch/xpack/core/ml/anomalydetection/"; + private static final String RESOURCE_PATH = "/ml/anomalydetection/"; private AnomalyDetectorsIndex() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java index 675db3dd92b24..11c4ec2aa0b77 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/notifications/NotificationsIndex.java @@ -13,7 +13,7 @@ public final class NotificationsIndex { public static final String NOTIFICATIONS_INDEX = ".ml-notifications-000002"; - private static final String RESOURCE_PATH = "/org/elasticsearch/xpack/core/ml/"; + private static final String RESOURCE_PATH = "/ml/"; private static final String MAPPINGS_VERSION_VARIABLE = "xpack.ml.version"; private NotificationsIndex() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java index 803b0f4d2bbda..895305d9372b8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/TemplateUtils.java @@ -14,16 +14,15 @@ import 
org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.template.resources.TemplateResources; import java.io.IOException; -import java.io.InputStream; import java.util.Collections; import java.util.Map; import java.util.function.Predicate; @@ -72,7 +71,7 @@ public static String loadTemplate(String resource, String version, String versio */ public static String loadTemplate(String resource, String version, String versionProperty, Map<String, String> variables) { try { - String source = load(resource); + String source = TemplateResources.load(resource); source = replaceVariables(source, version, versionProperty, variables); validate(source); return source; @@ -81,17 +80,6 @@ public static String loadTemplate(String resource, String version, String versio } } - /** - * Loads a resource from the classpath and returns it as a {@link String} - */ - public static String load(String name) throws IOException { - InputStream is = TemplateUtils.class.getResourceAsStream(name); - if (is == null) { - throw new IOException("Template [" + name + "] not found in classpath."); - } - return Streams.readFully(is).utf8ToString(); - } - /** * Parses and validates that the source is not empty. */ diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java index 8116267e3d8b5..babf57d41a85e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java @@ -293,7 +293,7 @@ public static class TestAuditor extends AbstractAuditor TemplateUtils.validate(null)); assertThat(exception.getMessage(), is("Template must not be null")); @@ -111,7 +106,10 @@ public void testValidateInvalidSource() { public void testValidate() throws IOException { String resource = Strings.format(SIMPLE_TEST_TEMPLATE, "test"); - TemplateUtils.validate(TemplateUtils.load(resource)); + try (InputStream is = TemplateUtilsTests.class.getResourceAsStream(resource)) { + assert is != null; + TemplateUtils.validate(new String(is.readAllBytes(), StandardCharsets.UTF_8)); + } } public void testReplaceVariable() { diff --git a/x-pack/plugin/core/template-resources/build.gradle b/x-pack/plugin/core/template-resources/build.gradle new file mode 100644 index 0000000000000..dc12435403a33 --- /dev/null +++ b/x-pack/plugin/core/template-resources/build.gradle @@ -0,0 +1,11 @@ + +apply plugin: 'elasticsearch.build' + +base { + archivesName = 'elasticsearch-x-pack-template-resources' +} + +tasks.named('forbiddenApisMain').configure { + // template-resources does not depend on core, so only jdk signatures should be checked + replaceSignatureFiles 'jdk-signatures' +} diff --git a/x-pack/plugin/core/template-resources/src/main/java/module-info.java b/x-pack/plugin/core/template-resources/src/main/java/module-info.java new file mode 100644 index 
0000000000000..aa2f159354942 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/java/module-info.java @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +module org.elasticsearch.xcore.templates { + exports org.elasticsearch.xpack.core.template.resources to org.elasticsearch.xcore; +} diff --git a/x-pack/plugin/core/template-resources/src/main/java/org/elasticsearch/xpack/core/template/resources/TemplateResources.java b/x-pack/plugin/core/template-resources/src/main/java/org/elasticsearch/xpack/core/template/resources/TemplateResources.java new file mode 100644 index 0000000000000..f1a89b6101fed --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/java/org/elasticsearch/xpack/core/template/resources/TemplateResources.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.template.resources; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; + +/** + * TemplateResources is a bridge to xpack template resource files. This class should only be used within x-pack core. + */ +public class TemplateResources { + + private TemplateResources() {} + + /** + * Returns a template resource for the given resource path. + * @throws IOException if the resource name is not found + */ + public static String load(String name) throws IOException { + try (InputStream is = TemplateResources.class.getResourceAsStream(name)) { + if (is == null) { + throw new IOException("Template [" + name + "] not found in x-pack template resources."); + } + return new String(is.readAllBytes(), StandardCharsets.UTF_8); + } + } +} diff --git a/x-pack/plugin/core/src/main/resources/180-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/180-days-default.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/180-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/180-days-default.json diff --git a/x-pack/plugin/core/src/main/resources/30-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/30-days-default.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/30-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/30-days-default.json diff --git a/x-pack/plugin/core/src/main/resources/365-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/365-days-default.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/365-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/365-days-default.json diff --git a/x-pack/plugin/core/src/main/resources/7-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/7-days-default.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/7-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/7-days-default.json diff --git a/x-pack/plugin/core/src/main/resources/90-days-default.json 
b/x-pack/plugin/core/template-resources/src/main/resources/90-days-default.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/90-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/90-days-default.json diff --git a/x-pack/plugin/core/src/main/resources/data-streams-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/data-streams-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-settings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-settings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-template.json b/x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/deprecation/deprecation-indexing-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/deprecation/deprecation-indexing-template.json diff --git a/x-pack/plugin/core/src/main/resources/ecs-dynamic-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs-dynamic-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/ecs-dynamic-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ecs-dynamic-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-default_policy.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-default_policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-default_policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-default_policy.json diff --git 
a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-final_pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-final_pipeline.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-final_pipeline.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-final_pipeline.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-settings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-settings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-template.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/analytics/behavioral_analytics-events-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/analytics/behavioral_analytics-events-template.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-settings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json similarity index 100% rename from 
x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-sync-jobs.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/search-acl-filter.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/search-acl-filter.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/search-acl-filter.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/search-acl-filter.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/generic_ingestion_pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/generic_ingestion_pipeline.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/generic_ingestion_pipeline.json rename to x-pack/plugin/core/template-resources/src/main/resources/entsearch/generic_ingestion_pipeline.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-actions-results-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-actions-results-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-actions-results.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-actions-results.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-actions-results.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-actions.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-actions.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-actions.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-agents.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-agents.json rename to 
x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-artifacts.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-artifacts.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-artifacts.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-artifacts.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-enrollment-api-keys.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-enrollment-api-keys.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-enrollment-api-keys.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-file-fromhost-data-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-fromhost-data-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-file-fromhost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-fromhost-data.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-data.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-file-fromhost-meta-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-fromhost-meta-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-file-fromhost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-fromhost-meta.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-fromhost-meta.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-file-tohost-data-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-tohost-data-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-file-tohost-data.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-tohost-data.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-data.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-file-tohost-meta-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-tohost-meta-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta-ilm-policy.json diff --git 
a/x-pack/plugin/core/src/main/resources/fleet-file-tohost-meta.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-file-tohost-meta.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-file-tohost-meta.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-policies-leader.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies-leader.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-policies-leader.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-policies-leader.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-policies.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-policies.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-policies.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-secrets.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-secrets.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-secrets.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-secrets.json diff --git a/x-pack/plugin/core/src/main/resources/fleet-servers.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-servers.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/fleet-servers.json rename to x-pack/plugin/core/template-resources/src/main/resources/fleet-servers.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/idp/saml-service-provider-template.json b/x-pack/plugin/core/template-resources/src/main/resources/idp/saml-service-provider-template.json similarity index 98% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/idp/saml-service-provider-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/idp/saml-service-provider-template.json index 60f964fe13483..dd69b9cecefc5 100644 --- a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/idp/saml-service-provider-template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/idp/saml-service-provider-template.json @@ -11,7 +11,6 @@ "number_of_replicas": 0, "auto_expand_replicas": "0-1", "index.priority": 10, - "index.refresh_interval": "1s", "index.format": 1 }, "mappings": { diff --git a/x-pack/plugin/core/src/main/resources/ilm-history-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/ilm-history-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/ilm-history-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/ilm-history-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/ilm-history.json b/x-pack/plugin/core/template-resources/src/main/resources/ilm-history.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/ilm-history.json rename to x-pack/plugin/core/template-resources/src/main/resources/ilm-history.json diff --git a/x-pack/plugin/core/src/main/resources/logs-default-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/logs-default-pipeline.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/logs-default-pipeline.json rename to 
x-pack/plugin/core/template-resources/src/main/resources/logs-default-pipeline.json diff --git a/x-pack/plugin/core/src/main/resources/logs-json-message-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/logs-json-message-pipeline.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/logs-json-message-pipeline.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs-json-message-pipeline.json diff --git a/x-pack/plugin/core/src/main/resources/logs-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/logs-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/logs-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/logs-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/logs-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs-policy.json diff --git a/x-pack/plugin/core/src/main/resources/logs-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs-settings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/logs-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs-settings.json diff --git a/x-pack/plugin/core/src/main/resources/logs-template.json b/x-pack/plugin/core/template-resources/src/main/resources/logs-template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/logs-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs-template.json diff --git a/x-pack/plugin/core/src/main/resources/metrics-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/metrics-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/metrics-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/metrics-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics-policy.json diff --git a/x-pack/plugin/core/src/main/resources/metrics-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics-settings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/metrics-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics-settings.json diff --git a/x-pack/plugin/core/src/main/resources/metrics-template.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics-template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/metrics-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics-template.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/annotations_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/annotations_index_mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/annotations_index_mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/annotations_index_mappings.json diff --git 
a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_template.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/results_index_template.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/results_index_template.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/state_index_template.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/state_index_template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/state_index_template.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/state_index_template.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/state_index_template_no_ilm.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/state_index_template_no_ilm.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/anomalydetection/state_index_template_no_ilm.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/anomalydetection/state_index_template_no_ilm.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/config_index_mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/config_index_mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/config_index_mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/inference_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/inference_index_mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/inference_index_mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/meta_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/meta_index_mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/meta_index_mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/meta_index_mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/notifications_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_mappings.json similarity index 100% rename from 
x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/notifications_index_mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/notifications_index_template.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/notifications_index_template.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/notifications_index_template.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/size_based_ilm_policy.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/size_based_ilm_policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/size_based_ilm_policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/size_based_ilm_policy.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/stats_index_mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/stats_index_mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/stats_index_mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/stats_index_mappings.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/stats_index_template.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/stats_index_template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/stats_index_template.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/stats_index_template.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/stats_index_template_no_ilm.json b/x-pack/plugin/core/template-resources/src/main/resources/ml/stats_index_template_no_ilm.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/stats_index_template_no_ilm.json rename to x-pack/plugin/core/template-resources/src/main/resources/ml/stats_index_template_no_ilm.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-alerts-7.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-alerts-7.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-alerts-7.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-alerts-7.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-beats-mb.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats-mb.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-beats.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-beats.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-ent-search-mb.json 
b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-ent-search-mb.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-es-mb.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-es-mb.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-es.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-es.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-es.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-kibana.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-kibana.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-logstash-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash-mb.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-logstash-mb.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash-mb.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-logstash.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-logstash.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-logstash.json diff --git a/x-pack/plugin/core/src/main/resources/monitoring-mb-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/monitoring-mb-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-events.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-events.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-executables.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-executables.json similarity index 100% rename from 
x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-executables.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-executables.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-hosts.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-hot-tier.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hot-tier.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-hot-tier.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hot-tier.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-ilm.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-ilm.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-ilm.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-ilm.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-metrics.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-metrics.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-metrics.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-metrics.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-stackframes.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stackframes.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-stackframes.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stackframes.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-stacktraces.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stacktraces.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-stacktraces.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-stacktraces.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-symbols.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-symbols.json similarity index 100% rename from 
x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/component-template/profiling-symbols.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-symbols.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/ilm-policy/profiling-60-days.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/ilm-policy/profiling-60-days.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/ilm-policy/profiling-60-days.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/ilm-policy/profiling-60-days.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-events.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-events.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-events.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-events.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-executables.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-executables.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-executables.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-executables.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-hosts.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-hosts.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-hosts.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-metrics.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-metrics.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-metrics.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-metrics.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-returnpads-private.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-returnpads-private.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-returnpads-private.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-returnpads-private.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-sq-executables.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-executables.json similarity index 100% rename from 
x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-sq-executables.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-executables.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-sq-leafframes.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-leafframes.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-sq-leafframes.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-sq-leafframes.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-stackframes.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-stackframes.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-stackframes.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-stackframes.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-stacktraces.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-stacktraces.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-stacktraces.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-stacktraces.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-symbols-global.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-symbols-global.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-symbols-global.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-symbols-global.json diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-symbols-private.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-symbols-private.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/profiler/index-template/profiling-symbols-private.json rename to x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-symbols-private.json diff --git a/x-pack/plugin/core/src/main/resources/slm-history-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/slm-history-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/slm-history-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/slm-history-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/slm-history.json b/x-pack/plugin/core/template-resources/src/main/resources/slm-history.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/slm-history.json rename to x-pack/plugin/core/template-resources/src/main/resources/slm-history.json diff --git a/x-pack/plugin/core/src/main/resources/synthetics-mappings.json 
b/x-pack/plugin/core/template-resources/src/main/resources/synthetics-mappings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/synthetics-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/synthetics-mappings.json diff --git a/x-pack/plugin/core/src/main/resources/synthetics-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/synthetics-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/synthetics-policy.json diff --git a/x-pack/plugin/core/src/main/resources/synthetics-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics-settings.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/synthetics-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/synthetics-settings.json diff --git a/x-pack/plugin/core/src/main/resources/synthetics-template.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics-template.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/synthetics-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/synthetics-template.json diff --git a/x-pack/plugin/core/src/main/resources/watch-history-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/watch-history-ilm-policy.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/watch-history-ilm-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/watch-history-ilm-policy.json diff --git a/x-pack/plugin/core/src/main/resources/watch-history-no-ilm.json b/x-pack/plugin/core/template-resources/src/main/resources/watch-history-no-ilm.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/watch-history-no-ilm.json rename to x-pack/plugin/core/template-resources/src/main/resources/watch-history-no-ilm.json diff --git a/x-pack/plugin/core/src/main/resources/watch-history.json b/x-pack/plugin/core/template-resources/src/main/resources/watch-history.json similarity index 100% rename from x-pack/plugin/core/src/main/resources/watch-history.json rename to x-pack/plugin/core/template-resources/src/main/resources/watch-history.json diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java index 68866e9f3eca8..799cec3500be9 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java @@ -60,13 +60,13 @@ public DeprecationIndexingTemplateRegistry( for (IndexTemplateConfig config : List.of( new IndexTemplateConfig( DEPRECATION_INDEXING_MAPPINGS_NAME, - "/org/elasticsearch/xpack/deprecation/deprecation-indexing-mappings.json", + "/deprecation/deprecation-indexing-mappings.json", INDEX_TEMPLATE_VERSION, DEPRECATION_INDEXING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( DEPRECATION_INDEXING_SETTINGS_NAME, - "/org/elasticsearch/xpack/deprecation/deprecation-indexing-settings.json", + "/deprecation/deprecation-indexing-settings.json", INDEX_TEMPLATE_VERSION, 
DEPRECATION_INDEXING_TEMPLATE_VERSION_VARIABLE ) @@ -91,7 +91,7 @@ protected Map getComponentTemplateConfigs() { private static final Map COMPOSABLE_INDEX_TEMPLATE_CONFIGS = parseComposableTemplates( new IndexTemplateConfig( DEPRECATION_INDEXING_TEMPLATE_NAME, - "/org/elasticsearch/xpack/deprecation/deprecation-indexing-template.json", + "/deprecation/deprecation-indexing-template.json", INDEX_TEMPLATE_VERSION, DEPRECATION_INDEXING_TEMPLATE_VERSION_VARIABLE ) @@ -103,10 +103,9 @@ protected Map getComposableTemplateConfigs() { } private static final List LIFECYCLE_POLICIES = List.of( - new LifecyclePolicyConfig( - DEPRECATION_INDEXING_POLICY_NAME, - "/org/elasticsearch/xpack/deprecation/deprecation-indexing-ilm-policy.json" - ).load(LifecyclePolicyConfig.DEFAULT_X_CONTENT_REGISTRY) + new LifecyclePolicyConfig(DEPRECATION_INDEXING_POLICY_NAME, "/deprecation/deprecation-indexing-ilm-policy.json").load( + LifecyclePolicyConfig.DEFAULT_X_CONTENT_REGISTRY + ) ); @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java index 2437593615d11..9b45a13d18d01 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java @@ -18,7 +18,7 @@ private AnalyticsConstants() {} public static final String EVENT_DATA_STREAM_INDEX_PATTERN = EVENT_DATA_STREAM_INDEX_PREFIX + "*"; // Resource config. - public static final String ROOT_RESOURCE_PATH = "/org/elasticsearch/xpack/entsearch/analytics/"; + public static final String ROOT_RESOURCE_PATH = "/entsearch/analytics/"; // The variable to be replaced with the template version number public static final String TEMPLATE_VERSION_VARIABLE = "xpack.entsearch.analytics.template.version"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java index e9513b35fd6b6..514f4b1edd812 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -53,7 +53,7 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry { public static final String ENT_SEARCH_GENERIC_PIPELINE_FILE = "generic_ingestion_pipeline"; // Resource config - public static final String ROOT_RESOURCE_PATH = "/org/elasticsearch/xpack/entsearch/"; + public static final String ROOT_RESOURCE_PATH = "/entsearch/"; public static final String ROOT_TEMPLATE_RESOURCE_PATH = ROOT_RESOURCE_PATH + "connector/"; // Variable used to replace template version in index templates diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java index bf183a5b11919..d43f69f1ee662 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/expression/function/EqlFunctionRegistry.java @@ -68,7 +68,7 @@ private 
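With the resources now sitting at the root of the `template-resources` jar, the registries above drop the long `/org/elasticsearch/xpack/...` prefixes in favour of short roots such as `/deprecation/`, `/entsearch/analytics/`, and `/entsearch/`. The sketch below shows how such a path composes and resolves through `TemplateResources.load`; apart from `TemplateResources.load` and the path strings taken from the diff, the class and method names are illustrative, not the production wiring.

[source,java]
----
import java.io.IOException;

import org.elasticsearch.xpack.core.template.resources.TemplateResources;

public class ResourcePathExample {

    // Old layout: "/org/elasticsearch/xpack/deprecation/deprecation-indexing-mappings.json"
    // New layout: resources live at the root of the template-resources jar.
    static final String ROOT_RESOURCE_PATH = "/deprecation/";

    public static String loadDeprecationMappings() throws IOException {
        return TemplateResources.load(ROOT_RESOURCE_PATH + "deprecation-indexing-mappings.json");
    }
}
----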
FunctionDefinition[][] functions() { // Arithmetic new FunctionDefinition[] { def(Add.class, Add::new, "add"), - def(Div.class, Div::new, "divide"), + def(Div.class, (BinaryBuilder
) Div::new, "divide"), def(Mod.class, Mod::new, "modulo"), def(Mul.class, Mul::new, "multiply"), def(ToNumber.class, ToNumber::new, "number"), diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java index 01bc951e14ba6..b4826e389393c 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java @@ -77,7 +77,7 @@ public class SamlServiceProviderIndex implements Closeable { public static final String INDEX_NAME = "saml-service-provider-v1"; static final String TEMPLATE_NAME = ALIAS_NAME; - private static final String TEMPLATE_RESOURCE = "/org/elasticsearch/xpack/idp/saml-service-provider-template.json"; + private static final String TEMPLATE_RESOURCE = "/idp/saml-service-provider-template.json"; private static final String TEMPLATE_META_VERSION_KEY = "idp-version"; private static final String TEMPLATE_VERSION_SUBSTITUTE = "idp.template.version"; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index c1bfd51cc3a70..22ec06cb567b7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -885,8 +885,9 @@ public Collection createComponents( IndicesService indicesService ) { if (enabled == false) { - // special holder for @link(MachineLearningFeatureSetUsage) which needs access to job manager, empty if ML is disabled - return List.of(new JobManagerHolder()); + // Holders for @link(MachineLearningFeatureSetUsage) which needs access to job manager and ML extension, + // both empty if ML is disabled + return List.of(new JobManagerHolder(), new MachineLearningExtensionHolder()); } machineLearningExtension.get().configure(environment.settings()); @@ -1247,7 +1248,8 @@ public Collection createComponents( trainedModelAssignmentService, trainedModelAllocationClusterServiceSetOnce.get(), deploymentManager.get(), - nodeAvailabilityZoneMapper + nodeAvailabilityZoneMapper, + new MachineLearningExtensionHolder(machineLearningExtension.get()) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningExtensionHolder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningExtensionHolder.java new file mode 100644 index 0000000000000..32305d115580e --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningExtensionHolder.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.node.Node; + +import java.util.Objects; + +/** + * Wrapper for the {@link MachineLearningExtension} interface that allows it to be used + * given the way {@link Node} does Guice bindings for plugin components. + * TODO: remove this class entirely once Guice is removed entirely. 
+ */ +public class MachineLearningExtensionHolder { + + private final MachineLearningExtension machineLearningExtension; + + /** + * Used by Guice, and in cases where ML is disabled. + */ + public MachineLearningExtensionHolder() { + this.machineLearningExtension = null; + } + + public MachineLearningExtensionHolder(MachineLearningExtension machineLearningExtension) { + this.machineLearningExtension = Objects.requireNonNull(machineLearningExtension); + } + + public boolean isEmpty() { + return machineLearningExtension == null; + } + + public MachineLearningExtension getMachineLearningExtension() { + return machineLearningExtension; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java index d8de8d504f976..d16a445133ccb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningUsageTransportAction.java @@ -73,6 +73,7 @@ public class MachineLearningUsageTransportAction extends XPackUsageFeatureTransp private final Client client; private final XPackLicenseState licenseState; private final JobManagerHolder jobManagerHolder; + private final MachineLearningExtension machineLearningExtension; private final boolean enabled; @Inject @@ -85,7 +86,8 @@ public MachineLearningUsageTransportAction( Environment environment, Client client, XPackLicenseState licenseState, - JobManagerHolder jobManagerHolder + JobManagerHolder jobManagerHolder, + MachineLearningExtensionHolder machineLearningExtensionHolder ) { super( XPackUsageFeatureAction.MACHINE_LEARNING.name(), @@ -98,6 +100,11 @@ public MachineLearningUsageTransportAction( this.client = new OriginSettingClient(client, ML_ORIGIN); this.licenseState = licenseState; this.jobManagerHolder = jobManagerHolder; + if (machineLearningExtensionHolder.isEmpty()) { + this.machineLearningExtension = new DefaultMachineLearningExtension(); + } else { + this.machineLearningExtension = machineLearningExtensionHolder.getMachineLearningExtension(); + } this.enabled = XPackSettings.MACHINE_LEARNING_ENABLED.get(environment.settings()); } @@ -187,10 +194,18 @@ protected void masterOperation( dataframeAnalyticsStatsRequest.setPageParams(new PageParams(0, 10_000)); ActionListener datafeedStatsListener = ActionListener.wrap(response -> { addDatafeedsUsage(response, datafeedsUsage); - client.execute(GetDataFrameAnalyticsStatsAction.INSTANCE, dataframeAnalyticsStatsRequest, dataframeAnalyticsStatsListener); + if (machineLearningExtension.isDataFrameAnalyticsEnabled()) { + client.execute(GetDataFrameAnalyticsStatsAction.INSTANCE, dataframeAnalyticsStatsRequest, dataframeAnalyticsStatsListener); + } else { + addInferenceUsage(inferenceUsageListener); + } }, e -> { logger.warn("Failed to get datafeed stats to include in ML usage", e); - client.execute(GetDataFrameAnalyticsStatsAction.INSTANCE, dataframeAnalyticsStatsRequest, dataframeAnalyticsStatsListener); + if (machineLearningExtension.isDataFrameAnalyticsEnabled()) { + client.execute(GetDataFrameAnalyticsStatsAction.INSTANCE, dataframeAnalyticsStatsRequest, dataframeAnalyticsStatsListener); + } else { + addInferenceUsage(inferenceUsageListener); + } }); // Step 1. Extract usage from jobs stats and then request stats for all datafeeds @@ -210,8 +225,14 @@ protected void masterOperation( ); // Step 0. 
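`MachineLearningExtensionHolder` is a small null-object wrapper so that Guice always has something to bind, even when ML is disabled or no extension is configured. The self-contained sketch below shows the pattern in isolation: the interface is a trimmed stand-in for the real `MachineLearningExtension`, and the fallback to a default implementation mirrors what `MachineLearningUsageTransportAction` does with `DefaultMachineLearningExtension`.

[source,java]
----
import java.util.Objects;

public class HolderPatternExample {

    /** Trimmed stand-in for MachineLearningExtension; only the flags used by the usage action. */
    interface MlExtension {
        boolean isAnomalyDetectionEnabled();
        boolean isDataFrameAnalyticsEnabled();
        boolean isNlpEnabled();
    }

    /** Stand-in for DefaultMachineLearningExtension: everything enabled. */
    static class DefaultMlExtension implements MlExtension {
        public boolean isAnomalyDetectionEnabled() { return true; }
        public boolean isDataFrameAnalyticsEnabled() { return true; }
        public boolean isNlpEnabled() { return true; }
    }

    /** Mirrors MachineLearningExtensionHolder: empty in the Guice / ML-disabled case. */
    static class Holder {
        private final MlExtension extension;

        Holder() { this.extension = null; }

        Holder(MlExtension extension) { this.extension = Objects.requireNonNull(extension); }

        boolean isEmpty() { return extension == null; }

        MlExtension get() { return extension; }
    }

    /** Consumer-side fallback, as in the usage transport action's constructor. */
    static MlExtension resolve(Holder holder) {
        return holder.isEmpty() ? new DefaultMlExtension() : holder.get();
    }

    public static void main(String[] args) {
        System.out.println(resolve(new Holder()).isNlpEnabled()); // prints "true"
    }
}
----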
Kick off the chain of callbacks by requesting jobs stats - GetJobsStatsAction.Request jobStatsRequest = new GetJobsStatsAction.Request(Metadata.ALL); - client.execute(GetJobsStatsAction.INSTANCE, jobStatsRequest, jobStatsListener); + if (machineLearningExtension.isAnomalyDetectionEnabled()) { + GetJobsStatsAction.Request jobStatsRequest = new GetJobsStatsAction.Request(Metadata.ALL); + client.execute(GetJobsStatsAction.INSTANCE, jobStatsRequest, jobStatsListener); + } else if (machineLearningExtension.isDataFrameAnalyticsEnabled()) { + client.execute(GetDataFrameAnalyticsStatsAction.INSTANCE, dataframeAnalyticsStatsRequest, dataframeAnalyticsStatsListener); + } else { + addInferenceUsage(inferenceUsageListener); + } } private void addJobsUsage(GetJobsStatsAction.Response response, List jobs, Map jobsUsage) { @@ -361,23 +382,27 @@ private void addDataFrameAnalyticsUsage(GetDataFrameAnalyticsAction.Response res } private void addInferenceUsage(ActionListener> listener) { - GetTrainedModelsAction.Request getModelsRequest = new GetTrainedModelsAction.Request( - "*", - Collections.emptyList(), - Collections.emptySet() - ); - getModelsRequest.setPageParams(new PageParams(0, 10_000)); - client.execute(GetTrainedModelsAction.INSTANCE, getModelsRequest, ActionListener.wrap(getModelsResponse -> { - GetTrainedModelsStatsAction.Request getStatsRequest = new GetTrainedModelsStatsAction.Request("*"); - getStatsRequest.setPageParams(new PageParams(0, 10_000)); - client.execute(GetTrainedModelsStatsAction.INSTANCE, getStatsRequest, ActionListener.wrap(getStatsResponse -> { - Map inferenceUsage = new LinkedHashMap<>(); - addInferenceIngestUsage(getStatsResponse, inferenceUsage); - addTrainedModelStats(getModelsResponse, getStatsResponse, inferenceUsage); - addDeploymentStats(getStatsResponse, inferenceUsage); - listener.onResponse(inferenceUsage); + if (machineLearningExtension.isDataFrameAnalyticsEnabled() || machineLearningExtension.isNlpEnabled()) { + GetTrainedModelsAction.Request getModelsRequest = new GetTrainedModelsAction.Request( + "*", + Collections.emptyList(), + Collections.emptySet() + ); + getModelsRequest.setPageParams(new PageParams(0, 10_000)); + client.execute(GetTrainedModelsAction.INSTANCE, getModelsRequest, ActionListener.wrap(getModelsResponse -> { + GetTrainedModelsStatsAction.Request getStatsRequest = new GetTrainedModelsStatsAction.Request("*"); + getStatsRequest.setPageParams(new PageParams(0, 10_000)); + client.execute(GetTrainedModelsStatsAction.INSTANCE, getStatsRequest, ActionListener.wrap(getStatsResponse -> { + Map inferenceUsage = new LinkedHashMap<>(); + addInferenceIngestUsage(getStatsResponse, inferenceUsage); + addTrainedModelStats(getModelsResponse, getStatsResponse, inferenceUsage); + addDeploymentStats(getStatsResponse, inferenceUsage); + listener.onResponse(inferenceUsage); + }, listener::onFailure)); }, listener::onFailure)); - }, listener::onFailure)); + } else { + listener.onResponse(Map.of()); + } } private void addDeploymentStats(GetTrainedModelsStatsAction.Response statsResponse, Map inferenceUsage) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java index 607f050797067..a26e23e2c367e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistry.java @@ -30,7 +30,7 @@ public class 
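The branching added to `masterOperation` and `addInferenceUsage` means the usage-collection chain now starts at the first stage whose feature flag is enabled and skips disabled stages entirely. A simplified, self-contained illustration of that step-0 decision follows; the `Runnable`s stand in for the real `client.execute(...)` calls and nothing here reproduces the actual request types.

[source,java]
----
public class UsageChainExample {

    static void kickOff(
        boolean anomalyDetectionEnabled,
        boolean dataFrameAnalyticsEnabled,
        Runnable requestJobStats,
        Runnable requestDfaStats,
        Runnable requestInferenceUsage
    ) {
        if (anomalyDetectionEnabled) {
            requestJobStats.run();          // Step 0: jobs stats, then datafeeds, DFA, and inference
        } else if (dataFrameAnalyticsEnabled) {
            requestDfaStats.run();          // skip the anomaly-detection stages
        } else {
            requestInferenceUsage.run();    // only trained-model / inference usage remains
        }
    }

    public static void main(String[] args) {
        kickOff(
            false,
            true,
            () -> System.out.println("job stats"),
            () -> System.out.println("dfa stats"),
            () -> System.out.println("inference usage")
        );
    }
}
----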
MlIndexTemplateRegistry extends IndexTemplateRegistry { - private static final String ROOT_RESOURCE_PATH = "/org/elasticsearch/xpack/core/ml/"; + private static final String ROOT_RESOURCE_PATH = "/ml/"; private static final String ANOMALY_DETECTION_PATH = ROOT_RESOURCE_PATH + "anomalydetection/"; private static final String VERSION_PATTERN = "xpack.ml.version"; private static final String VERSION_ID_PATTERN = "xpack.ml.version.id"; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlControllerHolder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlControllerHolder.java index 05d4bb13cd656..104b00eb864fa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlControllerHolder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlControllerHolder.java @@ -17,7 +17,7 @@ */ public class MlControllerHolder { - private MlController mlController; + private final MlController mlController; public MlControllerHolder(MlController mlController) { this.mlController = Objects.requireNonNull(mlController); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java index b1f897a202d78..905555c085736 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java @@ -86,6 +86,7 @@ import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -138,7 +139,12 @@ public void close() { client.threadPool().shutdown(); } - private MachineLearningUsageTransportAction newUsageAction(Settings settings) { + private MachineLearningUsageTransportAction newUsageAction( + Settings settings, + boolean isAnomalyDetectionEnabled, + boolean isDataFrameAnalyticsEnabled, + boolean isNlpEnabled + ) { return new MachineLearningUsageTransportAction( mock(TransportService.class), clusterService, @@ -148,7 +154,10 @@ private MachineLearningUsageTransportAction newUsageAction(Settings settings) { TestEnvironment.newEnvironment(settings), client, licenseState, - jobManagerHolder + jobManagerHolder, + new MachineLearningExtensionHolder( + new MachineLearningTests.MlTestExtension(true, true, isAnomalyDetectionEnabled, isDataFrameAnalyticsEnabled, isNlpEnabled) + ) ); } @@ -162,7 +171,7 @@ public void testAvailable() throws Exception { boolean available = randomBoolean(); when(licenseState.isAllowed(MachineLearningField.ML_API_FEATURE)).thenReturn(available); assertThat(featureSet.available(), is(available)); - var usageAction = newUsageAction(commonSettings); + var usageAction = newUsageAction(commonSettings, randomBoolean(), randomBoolean(), randomBoolean()); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage usage = future.get().getUsage(); @@ -190,7 +199,7 @@ public void testEnabled() throws Exception { licenseState ); assertThat(featureSet.enabled(), is(expected)); - var usageAction = newUsageAction(settings.build()); + var usageAction = newUsageAction(settings.build(), randomBoolean(), randomBoolean(), randomBoolean()); PlainActionFuture 
future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage usage = future.get().getUsage(); @@ -207,235 +216,230 @@ public void testUsage() throws Exception { Settings.Builder settings = Settings.builder().put(commonSettings); settings.put("xpack.ml.enabled", true); - Job opened1 = buildJob( - "opened1", - Collections.singletonList(buildMinDetector("foo")), - Collections.singletonMap("created_by", randomFrom("a-cool-module", "a_cool_module", "a cool module")) - ); - GetJobsStatsAction.Response.JobStats opened1JobStats = buildJobStats("opened1", JobState.OPENED, 100L, 3L); - Job opened2 = buildJob("opened2", Arrays.asList(buildMinDetector("foo"), buildMinDetector("bar"))); - GetJobsStatsAction.Response.JobStats opened2JobStats = buildJobStats("opened2", JobState.OPENED, 200L, 8L); - Job closed1 = buildJob("closed1", Arrays.asList(buildMinDetector("foo"), buildMinDetector("bar"), buildMinDetector("foobar"))); - GetJobsStatsAction.Response.JobStats closed1JobStats = buildJobStats("closed1", JobState.CLOSED, 300L, 0); - givenJobs(Arrays.asList(opened1, opened2, closed1), Arrays.asList(opened1JobStats, opened2JobStats, closed1JobStats)); + Map trainedModelsCountByAnalysis = Map.of("classification", 1, "regression", 1, "ner", 1); - givenDatafeeds( - Arrays.asList( - buildDatafeedStats(DatafeedState.STARTED), - buildDatafeedStats(DatafeedState.STARTED), - buildDatafeedStats(DatafeedState.STOPPED) - ) - ); + Map expectedDfaCountByAnalysis = setupComplexMocks(); - DataFrameAnalyticsConfig dfa1 = DataFrameAnalyticsConfigTests.createRandom("dfa_1"); - DataFrameAnalyticsConfig dfa2 = DataFrameAnalyticsConfigTests.createRandom("dfa_2"); - DataFrameAnalyticsConfig dfa3 = DataFrameAnalyticsConfigTests.createRandom("dfa_3"); + var usageAction = newUsageAction(settings.build(), true, true, true); + PlainActionFuture future = new PlainActionFuture<>(); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); + XPackFeatureSet.Usage mlUsage = future.get().getUsage(); - List dataFrameAnalytics = Arrays.asList(dfa1, dfa2, dfa3); - givenDataFrameAnalytics( - dataFrameAnalytics, - Arrays.asList( - buildDataFrameAnalyticsStats(dfa1.getId(), DataFrameAnalyticsState.STOPPED, null), - buildDataFrameAnalyticsStats(dfa2.getId(), DataFrameAnalyticsState.STOPPED, 100L), - buildDataFrameAnalyticsStats(dfa3.getId(), DataFrameAnalyticsState.STARTED, 200L) - ) - ); + BytesStreamOutput out = new BytesStreamOutput(); + mlUsage.writeTo(out); + XPackFeatureSet.Usage serializedUsage = new MachineLearningFeatureSetUsage(out.bytes().streamInput()); - Map expectedDfaCountByAnalysis = new HashMap<>(); - dataFrameAnalytics.forEach(dfa -> { - String analysisName = dfa.getAnalysis().getWriteableName(); - Integer analysisCount = expectedDfaCountByAnalysis.computeIfAbsent(analysisName, c -> 0); - expectedDfaCountByAnalysis.put(analysisName, ++analysisCount); - }); + for (XPackFeatureSet.Usage usage : Arrays.asList(mlUsage, serializedUsage)) { + assertThat(usage, is(notNullValue())); + assertThat(usage.name(), is(XPackField.MACHINE_LEARNING)); + assertThat(usage.enabled(), is(true)); + assertThat(usage.available(), is(true)); + XContentSource source; + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + usage.toXContent(builder, ToXContent.EMPTY_PARAMS); + source = new XContentSource(builder); + } - TrainedModelConfig trainedModel1 = TrainedModelConfigTests.createTestInstance("model_1") - .setModelSize(100) - 
.setEstimatedOperations(200) - .setMetadata(Collections.singletonMap("analytics_config", "anything")) - .setInferenceConfig(ClassificationConfig.EMPTY_PARAMS) - .build(); - TrainedModelConfig trainedModel2 = TrainedModelConfigTests.createTestInstance("model_2") - .setModelSize(200) - .setEstimatedOperations(400) - .setMetadata(Collections.singletonMap("analytics_config", "anything")) - .setInferenceConfig(RegressionConfig.EMPTY_PARAMS) - .build(); - TrainedModelConfig trainedModel3 = TrainedModelConfigTests.createTestInstance("model_3") - .setModelSize(300) - .setEstimatedOperations(600) - .setInferenceConfig(new NerConfig(null, null, null, null)) - .build(); - TrainedModelConfig trainedModel4 = TrainedModelConfigTests.createTestInstance("model_4") - .setTags(Collections.singletonList("prepackaged")) - .setModelSize(1000) - .setEstimatedOperations(2000) - .build(); - givenTrainedModels(Arrays.asList(trainedModel1, trainedModel2, trainedModel3, trainedModel4)); + assertThat(source.getValue("jobs._all.count"), equalTo(3)); + assertThat(source.getValue("jobs._all.detectors.min"), equalTo(1.0)); + assertThat(source.getValue("jobs._all.detectors.max"), equalTo(3.0)); + assertThat(source.getValue("jobs._all.detectors.total"), equalTo(6.0)); + assertThat(source.getValue("jobs._all.detectors.avg"), equalTo(2.0)); + assertThat(source.getValue("jobs._all.model_size.min"), equalTo(100.0)); + assertThat(source.getValue("jobs._all.model_size.max"), equalTo(300.0)); + assertThat(source.getValue("jobs._all.model_size.total"), equalTo(600.0)); + assertThat(source.getValue("jobs._all.model_size.avg"), equalTo(200.0)); + assertThat(source.getValue("jobs._all.created_by.a_cool_module"), equalTo(1)); + assertThat(source.getValue("jobs._all.created_by.unknown"), equalTo(2)); + + assertThat(source.getValue("jobs.opened.count"), equalTo(2)); + assertThat(source.getValue("jobs.opened.detectors.min"), equalTo(1.0)); + assertThat(source.getValue("jobs.opened.detectors.max"), equalTo(2.0)); + assertThat(source.getValue("jobs.opened.detectors.total"), equalTo(3.0)); + assertThat(source.getValue("jobs.opened.detectors.avg"), equalTo(1.5)); + assertThat(source.getValue("jobs.opened.model_size.min"), equalTo(100.0)); + assertThat(source.getValue("jobs.opened.model_size.max"), equalTo(200.0)); + assertThat(source.getValue("jobs.opened.model_size.total"), equalTo(300.0)); + assertThat(source.getValue("jobs.opened.model_size.avg"), equalTo(150.0)); + assertThat(source.getValue("jobs.opened.created_by.a_cool_module"), equalTo(1)); + assertThat(source.getValue("jobs.opened.created_by.unknown"), equalTo(1)); + + assertThat(source.getValue("jobs.closed.count"), equalTo(1)); + assertThat(source.getValue("jobs.closed.detectors.min"), equalTo(3.0)); + assertThat(source.getValue("jobs.closed.detectors.max"), equalTo(3.0)); + assertThat(source.getValue("jobs.closed.detectors.total"), equalTo(3.0)); + assertThat(source.getValue("jobs.closed.detectors.avg"), equalTo(3.0)); + assertThat(source.getValue("jobs.closed.model_size.min"), equalTo(300.0)); + assertThat(source.getValue("jobs.closed.model_size.max"), equalTo(300.0)); + assertThat(source.getValue("jobs.closed.model_size.total"), equalTo(300.0)); + assertThat(source.getValue("jobs.closed.model_size.avg"), equalTo(300.0)); + assertThat(source.getValue("jobs.closed.created_by.a_cool_module"), is(nullValue())); + assertThat(source.getValue("jobs.closed.created_by.unknown"), equalTo(1)); + + assertThat(source.getValue("jobs.opening"), is(nullValue())); + 
assertThat(source.getValue("jobs.closing"), is(nullValue())); + assertThat(source.getValue("jobs.failed"), is(nullValue())); + + assertThat(source.getValue("datafeeds._all.count"), equalTo(3)); + assertThat(source.getValue("datafeeds.started.count"), equalTo(2)); + assertThat(source.getValue("datafeeds.stopped.count"), equalTo(1)); + + assertThat(source.getValue("data_frame_analytics_jobs._all.count"), equalTo(3)); + assertThat(source.getValue("data_frame_analytics_jobs.started.count"), equalTo(1)); + assertThat(source.getValue("data_frame_analytics_jobs.stopped.count"), equalTo(2)); + assertThat(source.getValue("data_frame_analytics_jobs.analysis_counts"), equalTo(expectedDfaCountByAnalysis)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.min"), equalTo(100.0)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.max"), equalTo(200.0)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.total"), equalTo(300.0)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.avg"), equalTo(150.0)); + + assertThat(source.getValue("jobs._all.forecasts.total"), equalTo(11)); + assertThat(source.getValue("jobs._all.forecasts.forecasted_jobs"), equalTo(2)); + + assertThat(source.getValue("jobs.closed.forecasts.total"), equalTo(0)); + assertThat(source.getValue("jobs.closed.forecasts.forecasted_jobs"), equalTo(0)); + + assertThat(source.getValue("jobs.opened.forecasts.total"), equalTo(11)); + assertThat(source.getValue("jobs.opened.forecasts.forecasted_jobs"), equalTo(2)); + + // TODO error_count here??? + assertThat(source.getValue("inference.trained_models._all.count"), equalTo(4)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.min"), equalTo(100.0)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.max"), equalTo(300.0)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.total"), equalTo(600.0)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.avg"), equalTo(200.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.min"), equalTo(200.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.max"), equalTo(600.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.total"), equalTo(1200.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.avg"), equalTo(400.0)); + assertThat(source.getValue("inference.trained_models.count.total"), equalTo(4)); + trainedModelsCountByAnalysis.forEach( + (name, count) -> assertThat(source.getValue("inference.trained_models.count." 
+ name), equalTo(count)) + ); + assertThat(source.getValue("inference.trained_models.count.prepackaged"), equalTo(1)); + assertThat(source.getValue("inference.trained_models.count.other"), equalTo(1)); + + assertThat(source.getValue("inference.ingest_processors._all.pipelines.count"), equalTo(10)); + assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.sum"), equalTo(150)); + assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.min"), equalTo(10)); + assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.max"), equalTo(50)); + assertThat(source.getValue("inference.ingest_processors._all.time_ms.sum"), equalTo(15)); + assertThat(source.getValue("inference.ingest_processors._all.time_ms.min"), equalTo(1)); + assertThat(source.getValue("inference.ingest_processors._all.time_ms.max"), equalTo(5)); + assertThat(source.getValue("inference.ingest_processors._all.num_failures.sum"), equalTo(1500)); + assertThat(source.getValue("inference.ingest_processors._all.num_failures.min"), equalTo(100)); + assertThat(source.getValue("inference.ingest_processors._all.num_failures.max"), equalTo(500)); + assertThat(source.getValue("inference.deployments.count"), equalTo(2)); + assertThat(source.getValue("inference.deployments.inference_counts.total"), equalTo(9.0)); + assertThat(source.getValue("inference.deployments.inference_counts.min"), equalTo(4.0)); + assertThat(source.getValue("inference.deployments.inference_counts.total"), equalTo(9.0)); + assertThat(source.getValue("inference.deployments.inference_counts.max"), equalTo(5.0)); + assertThat(source.getValue("inference.deployments.inference_counts.avg"), equalTo(4.5)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.total"), equalTo(1300.0)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.min"), equalTo(300.0)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.max"), equalTo(1000.0)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.avg"), equalTo(650.0)); + assertThat(source.getValue("inference.deployments.time_ms.avg"), closeTo(45.55555555555556, 1e-10)); + } + } + + public void testAnomalyDetectionDisabled() throws Exception { + when(licenseState.isAllowed(MachineLearningField.ML_API_FEATURE)).thenReturn(true); + Settings.Builder settings = Settings.builder().put(commonSettings); + settings.put("xpack.ml.enabled", true); Map trainedModelsCountByAnalysis = Map.of("classification", 1, "regression", 1, "ner", 1); - givenTrainedModelStats( - new GetTrainedModelsStatsAction.Response( - new QueryPage<>( - List.of( - new GetTrainedModelsStatsAction.Response.TrainedModelStats( - trainedModel1.getModelId(), - new TrainedModelSizeStats(trainedModel1.getModelSize(), 0L), - new IngestStats( - new IngestStats.Stats(0, 0, 0, 0), - List.of(), - Map.of( - "pipeline_1", - List.of( - new IngestStats.ProcessorStat( - InferenceProcessor.TYPE, - InferenceProcessor.TYPE, - new IngestStats.Stats(10, 1, 1000, 100) - ), - new IngestStats.ProcessorStat( - InferenceProcessor.TYPE, - InferenceProcessor.TYPE, - new IngestStats.Stats(20, 2, 2000, 200) - ), - // Adding a non inference processor that should be ignored - new IngestStats.ProcessorStat("grok", "grok", new IngestStats.Stats(100, 100, 100, 100)) - ) - ) - ), - 1, - null, - null - ), - new GetTrainedModelsStatsAction.Response.TrainedModelStats( - trainedModel2.getModelId(), - new TrainedModelSizeStats(trainedModel2.getModelSize(), 0L), - new IngestStats( - 
new IngestStats.Stats(0, 0, 0, 0), - List.of(), - Map.of( - "pipeline_1", - List.of( - new IngestStats.ProcessorStat( - InferenceProcessor.TYPE, - InferenceProcessor.TYPE, - new IngestStats.Stats(30, 3, 3000, 300) - ) - ) - ) - ), - 2, - null, - null - ), - new GetTrainedModelsStatsAction.Response.TrainedModelStats( - trainedModel3.getModelId(), - new TrainedModelSizeStats(trainedModel3.getModelSize(), 0L), - new IngestStats( - new IngestStats.Stats(0, 0, 0, 0), - List.of(), - Map.of( - "pipeline_2", - List.of( - new IngestStats.ProcessorStat( - InferenceProcessor.TYPE, - InferenceProcessor.TYPE, - new IngestStats.Stats(40, 4, 4000, 400) - ) - ) - ) - ), - 3, - null, - new AssignmentStats( - "deployment_3", - "model_3", - null, - null, - null, - null, - Instant.now(), - List.of(), - Priority.NORMAL - ).setState(AssignmentState.STOPPING) - ), - new GetTrainedModelsStatsAction.Response.TrainedModelStats( - trainedModel4.getModelId(), - new TrainedModelSizeStats(trainedModel4.getModelSize(), 0L), - new IngestStats( - new IngestStats.Stats(0, 0, 0, 0), - List.of(), - Map.of( - "pipeline_3", - List.of( - new IngestStats.ProcessorStat( - InferenceProcessor.TYPE, - InferenceProcessor.TYPE, - new IngestStats.Stats(50, 5, 5000, 500) - ) - ) - ) - ), - 4, - null, - new AssignmentStats( - "deployment_4", - "model_4", - 2, - 2, - 1000, - ByteSizeValue.ofBytes(1000), - Instant.now(), - List.of( - AssignmentStats.NodeStats.forStartedState( - DiscoveryNodeUtils.create("foo", new TransportAddress(TransportAddress.META_ADDRESS, 2)), - 5, - 42.0, - 42.0, - 0, - 1, - 3L, - 2, - 3, - Instant.now(), - Instant.now(), - randomIntBetween(1, 16), - randomIntBetween(1, 16), - 1L, - 2L, - 33.0, - 1L - ), - AssignmentStats.NodeStats.forStartedState( - DiscoveryNodeUtils.create("bar", new TransportAddress(TransportAddress.META_ADDRESS, 3)), - 4, - 50.0, - 50.0, - 0, - 1, - 1L, - 2, - 3, - Instant.now(), - Instant.now(), - randomIntBetween(1, 16), - randomIntBetween(1, 16), - 2L, - 4L, - 34.0, - 1L - ) - ), - Priority.NORMAL - ).setState(AssignmentState.STARTED).setAllocationStatus(new AllocationStatus(2, 2)) - ) - ), - 0, - GetTrainedModelsStatsAction.Response.RESULTS_FIELD - ) - ) - ); + // This test works by setting up a mocks that imply jobs and datafeeds exist, then + // checking that the usage stats don't mention them. This proves that the trained model + // APIs were bypassed. In reality of course the cluster state would not contain trained + // models if the features were disabled. 
+ Map expectedDfaCountByAnalysis = setupComplexMocks(); + + var usageAction = newUsageAction(settings.build(), false, true, true); + PlainActionFuture future = new PlainActionFuture<>(); + usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); + XPackFeatureSet.Usage mlUsage = future.get().getUsage(); + + BytesStreamOutput out = new BytesStreamOutput(); + mlUsage.writeTo(out); + XPackFeatureSet.Usage serializedUsage = new MachineLearningFeatureSetUsage(out.bytes().streamInput()); + + for (XPackFeatureSet.Usage usage : Arrays.asList(mlUsage, serializedUsage)) { + assertThat(usage, is(notNullValue())); + assertThat(usage.name(), is(XPackField.MACHINE_LEARNING)); + assertThat(usage.enabled(), is(true)); + assertThat(usage.available(), is(true)); + XContentSource source; + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + usage.toXContent(builder, ToXContent.EMPTY_PARAMS); + source = new XContentSource(builder); + } + + assertThat(source.getValue("jobs"), anEmptyMap()); + assertThat(source.getValue("datafeeds"), anEmptyMap()); + + assertThat(source.getValue("data_frame_analytics_jobs._all.count"), equalTo(3)); + assertThat(source.getValue("data_frame_analytics_jobs.started.count"), equalTo(1)); + assertThat(source.getValue("data_frame_analytics_jobs.stopped.count"), equalTo(2)); + assertThat(source.getValue("data_frame_analytics_jobs.analysis_counts"), equalTo(expectedDfaCountByAnalysis)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.min"), equalTo(100.0)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.max"), equalTo(200.0)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.total"), equalTo(300.0)); + assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.avg"), equalTo(150.0)); + + assertThat(source.getValue("inference.trained_models._all.count"), equalTo(4)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.min"), equalTo(100.0)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.max"), equalTo(300.0)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.total"), equalTo(600.0)); + assertThat(source.getValue("inference.trained_models.model_size_bytes.avg"), equalTo(200.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.min"), equalTo(200.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.max"), equalTo(600.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.total"), equalTo(1200.0)); + assertThat(source.getValue("inference.trained_models.estimated_operations.avg"), equalTo(400.0)); + assertThat(source.getValue("inference.trained_models.count.total"), equalTo(4)); + trainedModelsCountByAnalysis.forEach( + (name, count) -> assertThat(source.getValue("inference.trained_models.count." 
+ name), equalTo(count)) + ); + assertThat(source.getValue("inference.trained_models.count.prepackaged"), equalTo(1)); + assertThat(source.getValue("inference.trained_models.count.other"), equalTo(1)); + + assertThat(source.getValue("inference.ingest_processors._all.pipelines.count"), equalTo(10)); + assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.sum"), equalTo(150)); + assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.min"), equalTo(10)); + assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.max"), equalTo(50)); + assertThat(source.getValue("inference.ingest_processors._all.time_ms.sum"), equalTo(15)); + assertThat(source.getValue("inference.ingest_processors._all.time_ms.min"), equalTo(1)); + assertThat(source.getValue("inference.ingest_processors._all.time_ms.max"), equalTo(5)); + assertThat(source.getValue("inference.ingest_processors._all.num_failures.sum"), equalTo(1500)); + assertThat(source.getValue("inference.ingest_processors._all.num_failures.min"), equalTo(100)); + assertThat(source.getValue("inference.ingest_processors._all.num_failures.max"), equalTo(500)); + assertThat(source.getValue("inference.deployments.count"), equalTo(2)); + assertThat(source.getValue("inference.deployments.inference_counts.total"), equalTo(9.0)); + assertThat(source.getValue("inference.deployments.inference_counts.min"), equalTo(4.0)); + assertThat(source.getValue("inference.deployments.inference_counts.total"), equalTo(9.0)); + assertThat(source.getValue("inference.deployments.inference_counts.max"), equalTo(5.0)); + assertThat(source.getValue("inference.deployments.inference_counts.avg"), equalTo(4.5)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.total"), equalTo(1300.0)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.min"), equalTo(300.0)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.max"), equalTo(1000.0)); + assertThat(source.getValue("inference.deployments.model_sizes_bytes.avg"), equalTo(650.0)); + assertThat(source.getValue("inference.deployments.time_ms.avg"), closeTo(45.55555555555556, 1e-10)); + } + } + + public void testUsageWithTrainedModelsDisabled() throws Exception { + when(licenseState.isAllowed(MachineLearningField.ML_API_FEATURE)).thenReturn(true); + Settings.Builder settings = Settings.builder().put(commonSettings); + settings.put("xpack.ml.enabled", true); + + // This test works by setting up a mocks that imply trained models exist, then checking + // that the usage stats don't mention them. This proves that the trained model APIs + // were bypassed. In reality of course the cluster state would not contain trained + // models if the features were disabled. 
+ setupComplexMocks(); - var usageAction = newUsageAction(settings.build()); + var usageAction = newUsageAction(settings.build(), true, false, false); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage mlUsage = future.get().getUsage(); @@ -454,7 +458,6 @@ public void testUsage() throws Exception { usage.toXContent(builder, ToXContent.EMPTY_PARAMS); source = new XContentSource(builder); } - assertThat(source.getValue("jobs._all.count"), equalTo(3)); assertThat(source.getValue("jobs._all.detectors.min"), equalTo(1.0)); assertThat(source.getValue("jobs._all.detectors.max"), equalTo(3.0)); @@ -499,14 +502,7 @@ public void testUsage() throws Exception { assertThat(source.getValue("datafeeds.started.count"), equalTo(2)); assertThat(source.getValue("datafeeds.stopped.count"), equalTo(1)); - assertThat(source.getValue("data_frame_analytics_jobs._all.count"), equalTo(3)); - assertThat(source.getValue("data_frame_analytics_jobs.started.count"), equalTo(1)); - assertThat(source.getValue("data_frame_analytics_jobs.stopped.count"), equalTo(2)); - assertThat(source.getValue("data_frame_analytics_jobs.analysis_counts"), equalTo(expectedDfaCountByAnalysis)); - assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.min"), equalTo(100.0)); - assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.max"), equalTo(200.0)); - assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.total"), equalTo(300.0)); - assertThat(source.getValue("data_frame_analytics_jobs.memory_usage.peak_usage_bytes.avg"), equalTo(150.0)); + assertThat(source.getValue("data_frame_analytics_jobs"), anEmptyMap()); assertThat(source.getValue("jobs._all.forecasts.total"), equalTo(11)); assertThat(source.getValue("jobs._all.forecasts.forecasted_jobs"), equalTo(2)); @@ -517,44 +513,7 @@ public void testUsage() throws Exception { assertThat(source.getValue("jobs.opened.forecasts.total"), equalTo(11)); assertThat(source.getValue("jobs.opened.forecasts.forecasted_jobs"), equalTo(2)); - // TODO error_count here??? - assertThat(source.getValue("inference.trained_models._all.count"), equalTo(4)); - assertThat(source.getValue("inference.trained_models.model_size_bytes.min"), equalTo(100.0)); - assertThat(source.getValue("inference.trained_models.model_size_bytes.max"), equalTo(300.0)); - assertThat(source.getValue("inference.trained_models.model_size_bytes.total"), equalTo(600.0)); - assertThat(source.getValue("inference.trained_models.model_size_bytes.avg"), equalTo(200.0)); - assertThat(source.getValue("inference.trained_models.estimated_operations.min"), equalTo(200.0)); - assertThat(source.getValue("inference.trained_models.estimated_operations.max"), equalTo(600.0)); - assertThat(source.getValue("inference.trained_models.estimated_operations.total"), equalTo(1200.0)); - assertThat(source.getValue("inference.trained_models.estimated_operations.avg"), equalTo(400.0)); - assertThat(source.getValue("inference.trained_models.count.total"), equalTo(4)); - trainedModelsCountByAnalysis.forEach( - (name, count) -> assertThat(source.getValue("inference.trained_models.count." 
+ name), equalTo(count)) - ); - assertThat(source.getValue("inference.trained_models.count.prepackaged"), equalTo(1)); - assertThat(source.getValue("inference.trained_models.count.other"), equalTo(1)); - - assertThat(source.getValue("inference.ingest_processors._all.pipelines.count"), equalTo(10)); - assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.sum"), equalTo(150)); - assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.min"), equalTo(10)); - assertThat(source.getValue("inference.ingest_processors._all.num_docs_processed.max"), equalTo(50)); - assertThat(source.getValue("inference.ingest_processors._all.time_ms.sum"), equalTo(15)); - assertThat(source.getValue("inference.ingest_processors._all.time_ms.min"), equalTo(1)); - assertThat(source.getValue("inference.ingest_processors._all.time_ms.max"), equalTo(5)); - assertThat(source.getValue("inference.ingest_processors._all.num_failures.sum"), equalTo(1500)); - assertThat(source.getValue("inference.ingest_processors._all.num_failures.min"), equalTo(100)); - assertThat(source.getValue("inference.ingest_processors._all.num_failures.max"), equalTo(500)); - assertThat(source.getValue("inference.deployments.count"), equalTo(2)); - assertThat(source.getValue("inference.deployments.inference_counts.total"), equalTo(9.0)); - assertThat(source.getValue("inference.deployments.inference_counts.min"), equalTo(4.0)); - assertThat(source.getValue("inference.deployments.inference_counts.total"), equalTo(9.0)); - assertThat(source.getValue("inference.deployments.inference_counts.max"), equalTo(5.0)); - assertThat(source.getValue("inference.deployments.inference_counts.avg"), equalTo(4.5)); - assertThat(source.getValue("inference.deployments.model_sizes_bytes.total"), equalTo(1300.0)); - assertThat(source.getValue("inference.deployments.model_sizes_bytes.min"), equalTo(300.0)); - assertThat(source.getValue("inference.deployments.model_sizes_bytes.max"), equalTo(1000.0)); - assertThat(source.getValue("inference.deployments.model_sizes_bytes.avg"), equalTo(650.0)); - assertThat(source.getValue("inference.deployments.time_ms.avg"), closeTo(45.55555555555556, 1e-10)); + assertThat(source.getValue("inference"), anEmptyMap()); } } @@ -575,7 +534,7 @@ public void testUsageWithOrphanedTask() throws Exception { GetJobsStatsAction.Response.JobStats closed1JobStats = buildJobStats("closed1", JobState.CLOSED, 300L, 0); givenJobs(Arrays.asList(opened1, closed1), Arrays.asList(opened1JobStats, opened2JobStats, closed1JobStats)); - var usageAction = newUsageAction(settings.build()); + var usageAction = newUsageAction(settings.build(), true, true, true); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage usage = future.get().getUsage(); @@ -605,7 +564,7 @@ public void testUsageDisabledML() throws Exception { Settings.Builder settings = Settings.builder().put(commonSettings); settings.put("xpack.ml.enabled", false); - var usageAction = newUsageAction(settings.build()); + var usageAction = newUsageAction(settings.build(), randomBoolean(), randomBoolean(), randomBoolean()); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage mlUsage = future.get().getUsage(); @@ -627,7 +586,7 @@ public void testNodeCount() throws Exception { Settings.Builder settings = Settings.builder().put(commonSettings); 
settings.put("xpack.ml.enabled", true); - var usageAction = newUsageAction(settings.build()); + var usageAction = newUsageAction(settings.build(), randomBoolean(), randomBoolean(), randomBoolean()); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, clusterState, future); XPackFeatureSet.Usage usage = future.get().getUsage(); @@ -653,7 +612,7 @@ public void testUsageGivenMlMetadataNotInstalled() throws Exception { settings.put("xpack.ml.enabled", true); when(clusterService.state()).thenReturn(ClusterState.EMPTY_STATE); - var usageAction = newUsageAction(settings.build()); + var usageAction = newUsageAction(settings.build(), true, true, true); PlainActionFuture future = new PlainActionFuture<>(); usageAction.masterOperation(null, null, ClusterState.EMPTY_STATE, future); XPackFeatureSet.Usage usage = future.get().getUsage(); @@ -869,4 +828,233 @@ private static GetDataFrameAnalyticsStatsAction.Response.Stats buildDataFrameAna private static ForecastStats buildForecastStats(long numberOfForecasts) { return new ForecastStatsTests().createForecastStats(numberOfForecasts, numberOfForecasts); } + + private Map setupComplexMocks() { + Job opened1 = buildJob( + "opened1", + Collections.singletonList(buildMinDetector("foo")), + Collections.singletonMap("created_by", randomFrom("a-cool-module", "a_cool_module", "a cool module")) + ); + GetJobsStatsAction.Response.JobStats opened1JobStats = buildJobStats("opened1", JobState.OPENED, 100L, 3L); + Job opened2 = buildJob("opened2", Arrays.asList(buildMinDetector("foo"), buildMinDetector("bar"))); + GetJobsStatsAction.Response.JobStats opened2JobStats = buildJobStats("opened2", JobState.OPENED, 200L, 8L); + Job closed1 = buildJob("closed1", Arrays.asList(buildMinDetector("foo"), buildMinDetector("bar"), buildMinDetector("foobar"))); + GetJobsStatsAction.Response.JobStats closed1JobStats = buildJobStats("closed1", JobState.CLOSED, 300L, 0); + givenJobs(Arrays.asList(opened1, opened2, closed1), Arrays.asList(opened1JobStats, opened2JobStats, closed1JobStats)); + + givenDatafeeds( + Arrays.asList( + buildDatafeedStats(DatafeedState.STARTED), + buildDatafeedStats(DatafeedState.STARTED), + buildDatafeedStats(DatafeedState.STOPPED) + ) + ); + + DataFrameAnalyticsConfig dfa1 = DataFrameAnalyticsConfigTests.createRandom("dfa_1"); + DataFrameAnalyticsConfig dfa2 = DataFrameAnalyticsConfigTests.createRandom("dfa_2"); + DataFrameAnalyticsConfig dfa3 = DataFrameAnalyticsConfigTests.createRandom("dfa_3"); + + List dataFrameAnalytics = Arrays.asList(dfa1, dfa2, dfa3); + givenDataFrameAnalytics( + dataFrameAnalytics, + Arrays.asList( + buildDataFrameAnalyticsStats(dfa1.getId(), DataFrameAnalyticsState.STOPPED, null), + buildDataFrameAnalyticsStats(dfa2.getId(), DataFrameAnalyticsState.STOPPED, 100L), + buildDataFrameAnalyticsStats(dfa3.getId(), DataFrameAnalyticsState.STARTED, 200L) + ) + ); + + Map expectedDfaCountByAnalysis = new HashMap<>(); + dataFrameAnalytics.forEach(dfa -> { + String analysisName = dfa.getAnalysis().getWriteableName(); + Integer analysisCount = expectedDfaCountByAnalysis.computeIfAbsent(analysisName, c -> 0); + expectedDfaCountByAnalysis.put(analysisName, ++analysisCount); + }); + + TrainedModelConfig trainedModel1 = TrainedModelConfigTests.createTestInstance("model_1") + .setModelSize(100) + .setEstimatedOperations(200) + .setMetadata(Collections.singletonMap("analytics_config", "anything")) + .setInferenceConfig(ClassificationConfig.EMPTY_PARAMS) + .build(); + TrainedModelConfig trainedModel2 = 
TrainedModelConfigTests.createTestInstance("model_2") + .setModelSize(200) + .setEstimatedOperations(400) + .setMetadata(Collections.singletonMap("analytics_config", "anything")) + .setInferenceConfig(RegressionConfig.EMPTY_PARAMS) + .build(); + TrainedModelConfig trainedModel3 = TrainedModelConfigTests.createTestInstance("model_3") + .setModelSize(300) + .setEstimatedOperations(600) + .setInferenceConfig(new NerConfig(null, null, null, null)) + .build(); + TrainedModelConfig trainedModel4 = TrainedModelConfigTests.createTestInstance("model_4") + .setTags(Collections.singletonList("prepackaged")) + .setModelSize(1000) + .setEstimatedOperations(2000) + .build(); + givenTrainedModels(Arrays.asList(trainedModel1, trainedModel2, trainedModel3, trainedModel4)); + + givenTrainedModelStats( + new GetTrainedModelsStatsAction.Response( + new QueryPage<>( + List.of( + new GetTrainedModelsStatsAction.Response.TrainedModelStats( + trainedModel1.getModelId(), + new TrainedModelSizeStats(trainedModel1.getModelSize(), 0L), + new IngestStats( + new IngestStats.Stats(0, 0, 0, 0), + List.of(), + Map.of( + "pipeline_1", + List.of( + new IngestStats.ProcessorStat( + InferenceProcessor.TYPE, + InferenceProcessor.TYPE, + new IngestStats.Stats(10, 1, 1000, 100) + ), + new IngestStats.ProcessorStat( + InferenceProcessor.TYPE, + InferenceProcessor.TYPE, + new IngestStats.Stats(20, 2, 2000, 200) + ), + // Adding a non inference processor that should be ignored + new IngestStats.ProcessorStat("grok", "grok", new IngestStats.Stats(100, 100, 100, 100)) + ) + ) + ), + 1, + null, + null + ), + new GetTrainedModelsStatsAction.Response.TrainedModelStats( + trainedModel2.getModelId(), + new TrainedModelSizeStats(trainedModel2.getModelSize(), 0L), + new IngestStats( + new IngestStats.Stats(0, 0, 0, 0), + List.of(), + Map.of( + "pipeline_1", + List.of( + new IngestStats.ProcessorStat( + InferenceProcessor.TYPE, + InferenceProcessor.TYPE, + new IngestStats.Stats(30, 3, 3000, 300) + ) + ) + ) + ), + 2, + null, + null + ), + new GetTrainedModelsStatsAction.Response.TrainedModelStats( + trainedModel3.getModelId(), + new TrainedModelSizeStats(trainedModel3.getModelSize(), 0L), + new IngestStats( + new IngestStats.Stats(0, 0, 0, 0), + List.of(), + Map.of( + "pipeline_2", + List.of( + new IngestStats.ProcessorStat( + InferenceProcessor.TYPE, + InferenceProcessor.TYPE, + new IngestStats.Stats(40, 4, 4000, 400) + ) + ) + ) + ), + 3, + null, + new AssignmentStats( + "deployment_3", + "model_3", + null, + null, + null, + null, + Instant.now(), + List.of(), + Priority.NORMAL + ).setState(AssignmentState.STOPPING) + ), + new GetTrainedModelsStatsAction.Response.TrainedModelStats( + trainedModel4.getModelId(), + new TrainedModelSizeStats(trainedModel4.getModelSize(), 0L), + new IngestStats( + new IngestStats.Stats(0, 0, 0, 0), + List.of(), + Map.of( + "pipeline_3", + List.of( + new IngestStats.ProcessorStat( + InferenceProcessor.TYPE, + InferenceProcessor.TYPE, + new IngestStats.Stats(50, 5, 5000, 500) + ) + ) + ) + ), + 4, + null, + new AssignmentStats( + "deployment_4", + "model_4", + 2, + 2, + 1000, + ByteSizeValue.ofBytes(1000), + Instant.now(), + List.of( + AssignmentStats.NodeStats.forStartedState( + DiscoveryNodeUtils.create("foo", new TransportAddress(TransportAddress.META_ADDRESS, 2)), + 5, + 42.0, + 42.0, + 0, + 1, + 3L, + 2, + 3, + Instant.now(), + Instant.now(), + randomIntBetween(1, 16), + randomIntBetween(1, 16), + 1L, + 2L, + 33.0, + 1L + ), + AssignmentStats.NodeStats.forStartedState( + DiscoveryNodeUtils.create("bar", new 
TransportAddress(TransportAddress.META_ADDRESS, 3)), + 4, + 50.0, + 50.0, + 0, + 1, + 1L, + 2, + 3, + Instant.now(), + Instant.now(), + randomIntBetween(1, 16), + randomIntBetween(1, 16), + 2L, + 4L, + 34.0, + 1L + ) + ), + Priority.NORMAL + ).setState(AssignmentState.STARTED).setAllocationStatus(new AllocationStatus(2, 2)) + ) + ), + 0, + GetTrainedModelsStatsAction.Response.RESULTS_FIELD + ) + ) + ); + return expectedDfaCountByAnalysis; + } } diff --git a/x-pack/plugin/profiler/build.gradle b/x-pack/plugin/profiling/build.gradle similarity index 85% rename from x-pack/plugin/profiler/build.gradle rename to x-pack/plugin/profiling/build.gradle index 842efca4d1308..30bcb5a8756dc 100644 --- a/x-pack/plugin/profiler/build.gradle +++ b/x-pack/plugin/profiling/build.gradle @@ -10,8 +10,8 @@ apply plugin: 'elasticsearch.internal-es-plugin' esplugin { name 'x-pack-profiling' - description 'The profiler plugin adds support for retrieving data from Universal Profiler.' - classname 'org.elasticsearch.xpack.profiler.ProfilingPlugin' + description 'The profiling plugin adds support for retrieving data from Universal Profiling.' + classname 'org.elasticsearch.xpack.profiling.ProfilingPlugin' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/CancellationIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java similarity index 99% rename from x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/CancellationIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java index 1fb9e2ffc23ee..b69b5b511180b 100644 --- a/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/CancellationIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/CancellationIT.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; diff --git a/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/GetProfilingActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetProfilingActionIT.java similarity index 97% rename from x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/GetProfilingActionIT.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetProfilingActionIT.java index be49453a47c89..ef65fa569d390 100644 --- a/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/GetProfilingActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetProfilingActionIT.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import java.util.List; diff --git a/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java similarity index 99% rename from x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/ProfilingTestCase.java rename to x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java index d2b5abdcf37dd..240f05f6b4335 100644 --- a/x-pack/plugin/profiler/src/internalClusterTest/java/org/elasticsearch/xpack/profiler/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/AbstractProfilingPersistenceManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/AbstractProfilingPersistenceManager.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java index 8ea0087565236..65d0c8894bfb0 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/AbstractProfilingPersistenceManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/AbstractProfilingPersistenceManager.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/EventsIndex.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java similarity index 98% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/EventsIndex.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java index 4eb39dd9cac6a..d9e3dff616671 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/EventsIndex.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/EventsIndex.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import java.util.Collection; import java.util.Collections; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingAction.java similarity index 93% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingAction.java index eaec2e4b7739a..a83313ffdbf80 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingRequest.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingRequest.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingRequest.java index 13c3b2e38ea45..f29d67fedafe9 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingRequest.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingResponse.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingResponse.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingResponse.java index 1962b31af2cf6..9c6ca0a1b6c22 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetProfilingResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetProfilingResponse.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.collect.Iterators; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java similarity index 98% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java index c75b675a4d3ec..d95758b44c04a 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/KvIndexResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/KvIndexResolver.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java index cf4034b6fbdbf..9f2a874e61ad9 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/KvIndexResolver.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/KvIndexResolver.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/Migration.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/Migration.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java index 9483eb84babcb..b6ccc2cee91c9 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/Migration.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Migration.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingDataStreamManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingDataStreamManager.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java index fb431ce93e6d3..37959df0638ba 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingDataStreamManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManager.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexManager.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java index 6c64983687710..fe5188ce7d020 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java similarity index 85% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistry.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java index f2b9d0cedd974..332f9301e4c28 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -89,7 +89,7 @@ protected boolean requiresMasterNode() { private static final List LIFECYCLE_POLICIES = List.of( new LifecyclePolicyConfig( "profiling-60-days", - "/org/elasticsearch/xpack/profiler/ilm-policy/profiling-60-days.json", + "/profiling/ilm-policy/profiling-60-days.json", Map.of(PROFILING_TEMPLATE_VERSION_VARIABLE, String.valueOf(INDEX_TEMPLATE_VERSION)) ).load(LifecyclePolicyConfig.DEFAULT_X_CONTENT_REGISTRY) ); @@ -106,61 +106,61 @@ protected List getPolicyConfigs() { for (IndexTemplateConfig config : List.of( new IndexTemplateConfig( "profiling-events", - "/org/elasticsearch/xpack/profiler/component-template/profiling-events.json", + "/profiling/component-template/profiling-events.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("events", PROFILING_EVENTS_VERSION) ), new IndexTemplateConfig( "profiling-executables", - "/org/elasticsearch/xpack/profiler/component-template/profiling-executables.json", + "/profiling/component-template/profiling-executables.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("executables", PROFILING_EXECUTABLES_VERSION) ), new IndexTemplateConfig( "profiling-ilm", - "/org/elasticsearch/xpack/profiler/component-template/profiling-ilm.json", + "/profiling/component-template/profiling-ilm.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-hot-tier", - "/org/elasticsearch/xpack/profiler/component-template/profiling-hot-tier.json", + "/profiling/component-template/profiling-hot-tier.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-metrics", - "/org/elasticsearch/xpack/profiler/component-template/profiling-metrics.json", + "/profiling/component-template/profiling-metrics.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("metrics", PROFILING_METRICS_VERSION) ), new IndexTemplateConfig( "profiling-hosts", - "/org/elasticsearch/xpack/profiler/component-template/profiling-hosts.json", + "/profiling/component-template/profiling-hosts.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("hosts", PROFILING_HOSTS_VERSION) ), new IndexTemplateConfig( "profiling-stackframes", - "/org/elasticsearch/xpack/profiler/component-template/profiling-stackframes.json", + "/profiling/component-template/profiling-stackframes.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("stackframes", PROFILING_STACKFRAMES_VERSION) ), new IndexTemplateConfig( "profiling-stacktraces", - "/org/elasticsearch/xpack/profiler/component-template/profiling-stacktraces.json", + "/profiling/component-template/profiling-stacktraces.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("stacktraces", PROFILING_STACKTRACES_VERSION) ), new IndexTemplateConfig( "profiling-symbols", - "/org/elasticsearch/xpack/profiler/component-template/profiling-symbols.json", + "/profiling/component-template/profiling-symbols.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("symbols", PROFILING_SYMBOLS_VERSION) @@ -190,71 +190,71 @@ protected Map getComponentTemplateConfigs() { private static final Map COMPOSABLE_INDEX_TEMPLATE_CONFIGS = parseComposableTemplates( new IndexTemplateConfig( "profiling-events", - 
"/org/elasticsearch/xpack/profiler/index-template/profiling-events.json", + "/profiling/index-template/profiling-events.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-metrics", - "/org/elasticsearch/xpack/profiler/index-template/profiling-metrics.json", + "/profiling/index-template/profiling-metrics.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-hosts", - "/org/elasticsearch/xpack/profiler/index-template/profiling-hosts.json", + "/profiling/index-template/profiling-hosts.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-executables", - "/org/elasticsearch/xpack/profiler/index-template/profiling-executables.json", + "/profiling/index-template/profiling-executables.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-stackframes", - "/org/elasticsearch/xpack/profiler/index-template/profiling-stackframes.json", + "/profiling/index-template/profiling-stackframes.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-stacktraces", - "/org/elasticsearch/xpack/profiler/index-template/profiling-stacktraces.json", + "/profiling/index-template/profiling-stacktraces.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), // templates for regular indices new IndexTemplateConfig( "profiling-returnpads-private", - "/org/elasticsearch/xpack/profiler/index-template/profiling-returnpads-private.json", + "/profiling/index-template/profiling-returnpads-private.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("returnpads.private", PROFILING_RETURNPADS_PRIVATE_VERSION) ), new IndexTemplateConfig( "profiling-sq-executables", - "/org/elasticsearch/xpack/profiler/index-template/profiling-sq-executables.json", + "/profiling/index-template/profiling-sq-executables.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("sq.executables", PROFILING_SQ_EXECUTABLES_VERSION) ), new IndexTemplateConfig( "profiling-sq-leafframes", - "/org/elasticsearch/xpack/profiler/index-template/profiling-sq-leafframes.json", + "/profiling/index-template/profiling-sq-leafframes.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE, indexVersion("sq.leafframes", PROFILING_SQ_LEAFFRAMES_VERSION) ), new IndexTemplateConfig( "profiling-symbols-global", - "/org/elasticsearch/xpack/profiler/index-template/profiling-symbols-global.json", + "/profiling/index-template/profiling-symbols-global.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( "profiling-symbols-private", - "/org/elasticsearch/xpack/profiler/index-template/profiling-symbols-private.json", + "/profiling/index-template/profiling-symbols-private.json", INDEX_TEMPLATE_VERSION, PROFILING_TEMPLATE_VERSION_VARIABLE ) diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingPlugin.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index 37f0a91126a08..6dad964717d47 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/ProfilingPlugin.java +++ 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetProfilingAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetProfilingAction.java similarity index 97% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetProfilingAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetProfilingAction.java index a2eeb6dcfb909..26e4937aa26ea 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetProfilingAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetProfilingAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java similarity index 96% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java index e9f41a0564076..c62d6dcad8c1a 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/RestGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/RestGetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackFrame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java similarity index 98% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackFrame.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java index 7bad037e6c819..5dc2b212ed55b 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackFrame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackTrace.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java index ec5309ff3c41e..d2f72dce2db55 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/StackTrace.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.ToXContentObject; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetProfilingAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetProfilingAction.java similarity index 99% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetProfilingAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetProfilingAction.java index c1ef09d9499d9..a9743992dd89b 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetProfilingAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetProfilingAction.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java similarity index 98% rename from x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetStatusAction.java rename to x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java index 19bb80b3fc233..e82358020b75f 100644 --- a/x-pack/plugin/profiler/src/main/java/org/elasticsearch/xpack/profiler/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/EventsIndexTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java similarity index 98% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/EventsIndexTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java index b653fecfe37ff..4f943cbb62a7e 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/EventsIndexTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/EventsIndexTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetProfilingRequestTests.java similarity index 98% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingRequestTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetProfilingRequestTests.java index d4394b2b1926b..c9d9a0f95e58e 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetProfilingRequestTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.BytesStreamOutput; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetProfilingResponseTests.java similarity index 98% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingResponseTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetProfilingResponseTests.java index 7e3b643a50ed7..b836ac660a684 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/GetProfilingResponseTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetProfilingResponseTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/KvIndexResolverTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java similarity index 99% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/KvIndexResolverTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java index 1add6207f0961..a1c5ee7697aec 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/KvIndexResolverTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/KvIndexResolverTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.Version; import org.elasticsearch.action.support.IndicesOptions; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingDataStreamManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java similarity index 99% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingDataStreamManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java index e0f4fd5eaf6f9..06a501c94f65c 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingDataStreamManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java similarity index 99% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexManagerTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java index b41a33f2c8705..1b2456381cebd 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistryTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java similarity index 99% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistryTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java index 64601d1055cec..039eb92a73e72 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/ProfilingIndexTemplateRegistryTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistryTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/RestGetProfilingActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetProfilingActionTests.java similarity index 99% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/RestGetProfilingActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetProfilingActionTests.java index 1682cb3c82da6..5f25d182c3f11 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/RestGetProfilingActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetProfilingActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/StackFrameTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java similarity index 98% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/StackFrameTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java index 81bc8dd8f3629..b9d5c549c2fbc 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/StackFrameTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackFrameTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/StackTraceTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java similarity index 99% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/StackTraceTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java index 9e73475707642..768ff4ae5f647 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/StackTraceTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/TransportGetProfilingActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetProfilingActionTests.java similarity index 98% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/TransportGetProfilingActionTests.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetProfilingActionTests.java index a7003f7839413..69d7f460caf4d 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/TransportGetProfilingActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetProfilingActionTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/VerifyingClient.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java similarity index 97% rename from x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/VerifyingClient.java rename to x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java index 10282adf8a56f..c37404c9209df 100644 --- a/x-pack/plugin/profiler/src/test/java/org/elasticsearch/xpack/profiler/VerifyingClient.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/VerifyingClient.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.profiler; +package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/analyzer/AnalyzerRules.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/analyzer/AnalyzerRules.java index 62cf5e133a88c..0c5bc8b0a97fe 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/analyzer/AnalyzerRules.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/analyzer/AnalyzerRules.java @@ -145,7 +145,8 @@ public static List maybeResolveAgainstList( UnresolvedAttribute u, Collection attrList, boolean allowCompound, - boolean acceptPattern + boolean acceptPattern, + BiFunction specialFieldHandler ) { List matches = new ArrayList<>(); @@ -177,7 +178,7 @@ public static List maybeResolveAgainstList( // found exact match or multiple if pattern if (matches.size() == 1 || isPattern) { // only add the location if the match is univocal; b/c otherwise adding the location will overwrite any preexisting one - matches.replaceAll(e -> handleSpecialFields(u, e.withLocation(u.source()), allowCompound)); + matches.replaceAll(e -> specialFieldHandler.apply(u, e.withLocation(u.source()))); return matches; } @@ -203,7 +204,7 @@ public static List maybeResolveAgainstList( ); } - private static Attribute handleSpecialFields(UnresolvedAttribute u, Attribute named, boolean allowCompound) { + public static Attribute handleSpecialFields(UnresolvedAttribute u, Attribute named, boolean allowCompound) { // if it's a object/compound type, keep it unresolved with a nice error message if (named instanceof FieldAttribute fa) { diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/arithmetic/Div.java index 896c6509a67f1..49337630aeaa3 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/arithmetic/Div.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/arithmetic/Div.java @@ -17,23 +17,33 @@ */ public class Div extends ArithmeticOperation implements BinaryComparisonInversible { + private DataType dataType; + public Div(Source source, Expression left, Expression right) { + this(source, left, right, null); + } + + public Div(Source source, Expression left, Expression right, DataType dataType) { super(source, left, right, DefaultBinaryArithmeticOperation.DIV); + this.dataType = dataType; } @Override protected NodeInfo
info() { - return NodeInfo.create(this, Div::new, left(), right()); + return NodeInfo.create(this, Div::new, left(), right(), dataType); } @Override protected Div replaceChildren(Expression newLeft, Expression newRight) { - return new Div(source(), newLeft, newRight); + return new Div(source(), newLeft, newRight, dataType); } @Override public DataType dataType() { - return DataTypeConverter.commonType(left().dataType(), right().dataType()); + if (dataType == null) { + dataType = DataTypeConverter.commonType(left().dataType(), right().dataType()); + } + return dataType; } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java index b25593a57e00d..b9fa092868592 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java @@ -27,9 +27,17 @@ public class EsRelation extends LeafPlan { private final boolean frozen; public EsRelation(Source source, EsIndex index, boolean frozen) { + this(source, index, flatten(source, index.mapping()), frozen); + } + + public EsRelation(Source source, EsIndex index, List attributes) { + this(source, index, attributes, false); + } + + private EsRelation(Source source, EsIndex index, List attributes, boolean frozen) { super(source); this.index = index; - this.attrs = flatten(source, index.mapping()); + this.attrs = attributes; this.frozen = frozen; } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/Project.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/Project.java index dc63705b05685..fedf468009779 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/Project.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/Project.java @@ -43,6 +43,10 @@ public List projections() { return projections; } + public Project withProjections(List projections) { + return new Project(source(), child(), projections); + } + @Override public boolean resolved() { return super.resolved() && Expressions.anyMatch(projections, Functions::isAggregate) == false; diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/EsField.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/EsField.java index eaf8a5c894db4..163667749de2d 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/EsField.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/EsField.java @@ -48,13 +48,6 @@ public DataType getDataType() { return esDataType; } - /** - * Create a new {@link EsField} replacing the type. 
- */ - public EsField withType(DataType esDataType) { - return new EsField(name, esDataType, properties, aggregatable, isAlias); - } - /** * This field can be aggregated */ diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java index c297c77ada1ee..0154882e598a8 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/FrozenIndexInput.java @@ -91,7 +91,7 @@ private FrozenIndexInput( headerBlobCacheByteRange, footerBlobCacheByteRange ); - this.cacheFile = cacheFile; + this.cacheFile = cacheFile.copy(); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 0e86469ad0eea..3112544905396 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.core.Tuple; +import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules; import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.AddMissingEqualsToBoolField; import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.ParameterizedAnalyzerRule; import org.elasticsearch.xpack.ql.capabilities.Resolvables; @@ -166,7 +167,13 @@ private static Attribute resolveAgainstList(UnresolvedAttribute u, Collection attrList, boolean allowCompound) { - var matches = maybeResolveAgainstList(u, attrList, allowCompound, false); + var matches = maybeResolveAgainstList( + u, + attrList, + allowCompound, + false, + (ua, na) -> AnalyzerRules.handleSpecialFields(ua, na, allowCompound) + ); return matches.isEmpty() ? 
null : matches.get(0); } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java index 044828f9ab01b..77c7272c3b60d 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java @@ -1846,6 +1846,96 @@ public void testPivotWithTopMetrics() throws Exception { assertEquals("business_3", actual); } + public void testPivotWithBoxplot() throws Exception { + String transformId = "boxplot_transform"; + String transformIndex = "boxplot_pivot_reviews"; + setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformIndex); + + final Request createTransformRequest = createRequestWithAuth( + "PUT", + getTransformEndpoint() + transformId, + BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS + ); + + String config = Strings.format(""" + { + "source": { + "index": "%s" + }, + "dest": { + "index": "%s" + }, + "pivot": { + "group_by": { + "reviewer": { + "terms": { + "field": "user_id" + } + } + }, + "aggregations": { + "stars_boxplot": { + "boxplot": { + "field": "stars" + } + } + } + } + }""", REVIEWS_INDEX_NAME, transformIndex); + + createTransformRequest.setJsonEntity(config); + Map createTransformResponse = entityAsMap(client().performRequest(createTransformRequest)); + assertThat(createTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + startAndWaitForTransform(transformId, transformIndex, BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS); + assertTrue(indexExists(transformIndex)); + + Map searchResult = getAsMap(transformIndex + "/_search?q=reviewer:user_4"); + assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.min", searchResult)).get(0), + is(equalTo(1.0)) + ); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.max", searchResult)).get(0), + is(equalTo(5.0)) + ); + assertThat(((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.q1", searchResult)).get(0), is(equalTo(3.0))); + assertThat(((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.q2", searchResult)).get(0), is(equalTo(5.0))); + assertThat(((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.q3", searchResult)).get(0), is(equalTo(5.0))); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.lower", searchResult)).get(0), + is(equalTo(1.0)) + ); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.upper", searchResult)).get(0), + is(equalTo(5.0)) + ); + + searchResult = getAsMap(transformIndex + "/_search?q=reviewer:user_1"); + assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.min", searchResult)).get(0), + is(equalTo(1.0)) + ); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.max", searchResult)).get(0), + is(equalTo(5.0)) + ); + assertThat(((List) 
XContentMapValues.extractValue("hits.hits._source.stars_boxplot.q1", searchResult)).get(0), is(equalTo(3.0))); + assertThat(((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.q2", searchResult)).get(0), is(equalTo(5.0))); + assertThat(((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.q3", searchResult)).get(0), is(equalTo(5.0))); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.lower", searchResult)).get(0), + is(equalTo(1.0)) + ); + assertThat( + ((List) XContentMapValues.extractValue("hits.hits._source.stars_boxplot.upper", searchResult)).get(0), + is(equalTo(5.0)) + ); + + } + public void testPivotWithAggregateMetricDouble() throws Exception { String transformId = "aggregate_metric_double_transform"; String transformIndex = "aggregate_metric_double_pivot_reviews"; diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java index 0f7574a58bb5e..95e05d93ff03a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregations.java @@ -56,7 +56,6 @@ public final class TransformAggregations { private static final List UNSUPPORTED_AGGS = Arrays.asList( "adjacency_matrix", "auto_date_histogram", - "boxplot", // https://github.com/elastic/elasticsearch/issues/52189 "composite", // DONT because it makes no sense "date_histogram", "date_range", @@ -120,7 +119,8 @@ enum AggregationType { RARE_TERMS("rare_terms", FLATTENED), MISSING("missing", LONG), TOP_METRICS("top_metrics", SOURCE), - STATS("stats", DOUBLE); + STATS("stats", DOUBLE), + BOXPLOT("boxplot", DOUBLE); private final String aggregationType; private final String targetMapping; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java index 9d127d89575b4..fbb3b2fb9a866 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java @@ -85,7 +85,7 @@ public class PivotTests extends ESTestCase { private Client client; // exclude aggregations from the analytics module as we don't have parser for it here - private final Set externalAggregations = Collections.singleton("top_metrics"); + private final Set externalAggregations = Set.of("top_metrics", "boxplot"); private final Set supportedAggregations = Stream.of(AggregationType.values()) .map(AggregationType::getName) diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java index 1666fde56df11..4564ec5cc67ea 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/TransformAggregationsTests.java @@ -24,6 +24,7 @@ import 
org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; +import org.elasticsearch.xpack.analytics.boxplot.BoxplotAggregationBuilder; import java.util.Arrays; import java.util.List; @@ -136,6 +137,9 @@ public void testResolveTargetMapping() { assertEquals("double", TransformAggregations.resolveTargetMapping("stats", null)); assertEquals("double", TransformAggregations.resolveTargetMapping("stats", "int")); + // boxplot + assertEquals("double", TransformAggregations.resolveTargetMapping("boxplot", "double")); + // corner case: source type null assertEquals(null, TransformAggregations.resolveTargetMapping("min", null)); } @@ -365,6 +369,23 @@ public void testGetAggregationOutputTypesSubAggregations() { assertEquals("percentiles", outputTypes.get("filter_1.filter_2.percentiles.99_5")); } + public void testGetAggregationOutputTypesBoxplot() { + AggregationBuilder boxplotAggregationBuilder = new BoxplotAggregationBuilder("boxplot"); + + Tuple<Map<String, String>, Map<String, String>> inputAndOutputTypes = TransformAggregations.getAggregationInputAndOutputTypes( + boxplotAggregationBuilder + ); + Map<String, String> outputTypes = inputAndOutputTypes.v2(); + assertEquals(7, outputTypes.size()); + assertEquals("boxplot", outputTypes.get("boxplot.min")); + assertEquals("boxplot", outputTypes.get("boxplot.max")); + assertEquals("boxplot", outputTypes.get("boxplot.q1")); + assertEquals("boxplot", outputTypes.get("boxplot.q2")); + assertEquals("boxplot", outputTypes.get("boxplot.q3")); + assertEquals("boxplot", outputTypes.get("boxplot.lower")); + assertEquals("boxplot", outputTypes.get("boxplot.upper")); + } + public void testGenerateKeyForRange() { assertThat(TransformAggregations.generateKeyForRange(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY), is(equalTo("*-*"))); assertThat(TransformAggregations.generateKeyForRange(Double.NEGATIVE_INFINITY, 0.0), is(equalTo("*-0")));