Commit

Merge branch 'main' into kderusso/query-rules-type-stats

markjhoy authored Nov 13, 2024
2 parents 14e925b + b236011 commit 196192c
Showing 751 changed files with 13,985 additions and 6,155 deletions.
2 changes: 1 addition & 1 deletion .buildkite/pipelines/intake.yml
@@ -56,7 +56,7 @@ steps:
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"]
BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"]
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
6 changes: 3 additions & 3 deletions .buildkite/pipelines/periodic-packaging.yml
@@ -288,8 +288,8 @@ steps:
env:
BWC_VERSION: 8.15.4

- label: "{{matrix.image}} / 8.16.0 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.0
- label: "{{matrix.image}} / 8.16.1 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.1
timeout_in_minutes: 300
matrix:
setup:
@@ -302,7 +302,7 @@ steps:
machineType: custom-16-32768
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 8.16.0
BWC_VERSION: 8.16.1

- label: "{{matrix.image}} / 8.17.0 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0
10 changes: 5 additions & 5 deletions .buildkite/pipelines/periodic.yml
@@ -306,8 +306,8 @@ steps:
- signal_reason: agent_stop
limit: 3

- label: 8.16.0 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.0#bwcTest
- label: 8.16.1 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.1#bwcTest
timeout_in_minutes: 300
agents:
provider: gcp
@@ -316,7 +316,7 @@ steps:
buildDirectory: /dev/shm/bk
preemptible: true
env:
BWC_VERSION: 8.16.0
BWC_VERSION: 8.16.1
retry:
automatic:
- exit_status: "-1"
@@ -429,7 +429,7 @@ steps:
setup:
ES_RUNTIME_JAVA:
- openjdk21
BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"]
BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"]
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
@@ -471,7 +471,7 @@ steps:
ES_RUNTIME_JAVA:
- openjdk21
- openjdk23
BWC_VERSION: ["8.15.4", "8.16.0", "8.17.0", "9.0.0"]
BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"]
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
2 changes: 1 addition & 1 deletion .ci/bwcVersions
@@ -15,6 +15,6 @@ BWC_VERSION:
- "8.13.4"
- "8.14.3"
- "8.15.4"
- "8.16.0"
- "8.16.1"
- "8.17.0"
- "9.0.0"
1 change: 1 addition & 0 deletions .ci/dockerOnLinuxExclusions
@@ -15,6 +15,7 @@ sles-15.2
sles-15.3
sles-15.4
sles-15.5
sles-15.6

# These OSes are deprecated and filtered starting with 8.0.0, but need to be excluded
# for PR checks
3 changes: 1 addition & 2 deletions .ci/snapshotBwcVersions
@@ -1,5 +1,4 @@
BWC_VERSION:
- "8.15.4"
- "8.16.0"
- "8.16.1"
- "8.17.0"
- "9.0.0"
59 changes: 27 additions & 32 deletions README.asciidoc
@@ -4,7 +4,7 @@ Elasticsearch is a distributed search and analytics engine, scalable data store

Use cases enabled by Elasticsearch include:

* https://www.elastic.co/search-labs/blog/articles/retrieval-augmented-generation-rag[Retrieval Augmented Generation (RAG)]
* https://www.elastic.co/search-labs/blog/categories/vector-search[Vector search]
* Full-text search
* Logs
@@ -17,7 +17,7 @@ Use cases enabled by Elasticsearch include:
To learn more about Elasticsearch's features and capabilities, see our
https://www.elastic.co/products/elasticsearch[product page].

For https://www.elastic.co/search-labs/blog/categories/ml-research[machine learning innovations] and the latest https://www.elastic.co/search-labs/blog/categories/lucene[Lucene contributions from Elastic], see https://www.elastic.co/search-labs[Search Labs].

[[get-started]]
== Get started
@@ -27,20 +27,20 @@ https://www.elastic.co/cloud/as-a-service[Elasticsearch Service on Elastic
Cloud].

If you prefer to install and manage Elasticsearch yourself, you can download
the latest version from
https://www.elastic.co/downloads/elasticsearch[elastic.co/downloads/elasticsearch].

=== Run Elasticsearch locally

////
IMPORTANT: This content is replicated in the Elasticsearch repo. See `run-elasticsearch-locally.asciidoc`.
Ensure both files are in sync.
https://github.com/elastic/start-local is the source of truth.
////

[WARNING]
====
DO NOT USE THESE INSTRUCTIONS FOR PRODUCTION DEPLOYMENTS.
This setup is intended for local development and testing only.
@@ -93,20 +93,20 @@ Use this key to connect to Elasticsearch with a https://www.elastic.co/guide/en/
From the `elastic-start-local` folder, check the connection to Elasticsearch using `curl`:

[source,sh]
----
source .env
curl $ES_LOCAL_URL -H "Authorization: ApiKey ${ES_LOCAL_API_KEY}"
----
// NOTCONSOLE

=== Send requests to Elasticsearch

You send data and other requests to Elasticsearch through REST APIs.
You can interact with Elasticsearch using any client that sends HTTP requests,
such as the https://www.elastic.co/guide/en/elasticsearch/client/index.html[Elasticsearch
language clients] and https://curl.se[curl].

==== Using curl

Here's an example curl command to create a new Elasticsearch index, using basic auth:
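
The command itself sits in the collapsed hunk below; as a rough, illustrative sketch of that kind of request (the index name, host, and credentials here are placeholders, not the README's exact values):

[source,sh]
----
# Illustrative only: create an index named "my-index" with basic auth
curl -u elastic:$ELASTIC_PASSWORD -X PUT "http://localhost:9200/my-index"
----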

@@ -149,19 +149,19 @@ print(client.info())

==== Using the Dev Tools Console

Kibana's developer console provides an easy way to experiment and test requests.
To access the console, open Kibana, then go to **Management** > **Dev Tools**.

**Add data**

You index data into Elasticsearch by sending JSON objects (documents) through the REST APIs.
Whether you have structured or unstructured text, numerical data, or geospatial data,
Elasticsearch efficiently stores and indexes it in a way that supports fast searches.

For timestamped data such as logs and metrics, you typically add documents to a
data stream made up of multiple auto-generated backing indices.

To add a single document to an index, submit an HTTP POST request that targets the index.

----
POST /customer/_doc/1
@@ -171,19 +171,19 @@ POST /customer/_doc/1
}
----

This request automatically creates the `customer` index if it doesn't exist,
adds a new document that has an ID of 1, and
stores and indexes the `firstname` and `lastname` fields.

The new document is available immediately from any node in the cluster.
You can retrieve it with a GET request that specifies its document ID:

----
GET /customer/_doc/1
----

To add multiple documents in one request, use the `_bulk` API.
Bulk data must be newline-delimited JSON (NDJSON).
Each line must end in a newline character (`\n`), including the last line.

----
@@ -200,15 +200,15 @@ PUT customer/_bulk

**Search**

Indexed documents are available for search in near real-time.
The following search matches all customers with a first name of _Jennifer_
in the `customer` index.

----
GET customer/_search
{
"query" : {
"match" : { "firstname": "Jennifer" }
}
}
----
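
If you prefer to stay in the terminal, the same match query can be sent with curl; the endpoint, credentials, and header below are an illustrative sketch rather than part of the README:

[source,sh]
----
# Illustrative only: run the same match query over HTTP
curl -u elastic:$ELASTIC_PASSWORD \
  -H "Content-Type: application/json" \
  "http://localhost:9200/customer/_search" \
  -d '{ "query": { "match": { "firstname": "Jennifer" } } }'
----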
@@ -223,9 +223,9 @@ data streams, or index aliases.

. Go to **Management > Stack Management > Kibana > Data Views**.
. Select **Create data view**.
. Enter a name for the data view and a pattern that matches one or more indices,
such as _customer_.
. Select **Save data view to Kibana**.

To start exploring, go to **Analytics > Discover**.

@@ -254,11 +254,6 @@ To build a distribution for another platform, run the related command:
./gradlew :distribution:archives:windows-zip:assemble
----

To build distributions for all supported platforms, run:
----
./gradlew assemble
----

Distributions are output to `distribution/archives`.

To run the test suite, see xref:TESTING.asciidoc[TESTING].
@@ -281,7 +276,7 @@ The https://github.com/elastic/elasticsearch-labs[`elasticsearch-labs`] repo con
[[contribute]]
== Contribute

For contribution guidelines, see xref:CONTRIBUTING.md[CONTRIBUTING].

[[questions]]
== Questions? Problems? Suggestions?
@@ -0,0 +1,133 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/

package org.elasticsearch.benchmark.indices.resolution;

import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.indices.SystemIndices;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

@State(Scope.Benchmark)
@Fork(3)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@SuppressWarnings("unused") // invoked by benchmarking framework
public class IndexNameExpressionResolverBenchmark {

    private static final String DATA_STREAM_PREFIX = "my-ds-";
    private static final String INDEX_PREFIX = "my-index-";

    @Param(
        {
            // # data streams | # indices
            " 1000| 100",
            " 5000| 500",
            " 10000| 1000" }
    )
    public String resourceMix = "100|10";

    @Setup
    public void setUp() {
        final String[] params = resourceMix.split("\\|");

        int numDataStreams = toInt(params[0]);
        int numIndices = toInt(params[1]);

        Metadata.Builder mb = Metadata.builder();
        String[] indices = new String[numIndices + numDataStreams * (numIndices + 1)];
        int position = 0;
        // Plain concrete indices: my-index-1 .. my-index-N
        for (int i = 1; i <= numIndices; i++) {
            String indexName = INDEX_PREFIX + i;
            createIndexMetadata(indexName, mb);
            indices[position++] = indexName;
        }

        // Data streams, each with numIndices generated backing indices
        for (int i = 1; i <= numDataStreams; i++) {
            String dataStreamName = DATA_STREAM_PREFIX + i;
            List<Index> backingIndices = new ArrayList<>();
            for (int j = 1; j <= numIndices; j++) {
                String backingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, j);
                backingIndices.add(createIndexMetadata(backingIndexName, mb).getIndex());
                indices[position++] = backingIndexName;
            }
            indices[position++] = dataStreamName;
            mb.put(DataStream.builder(dataStreamName, backingIndices).build());
        }
        int mid = indices.length / 2;
        clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(mb).build();
        resolver = new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY), new SystemIndices(List.of()));
        // Three request shapes: every concrete name, a single "*", and the full list with one wildcard swapped in
        indexListRequest = new Request(IndicesOptions.lenientExpandOpenHidden(), indices);
        starRequest = new Request(IndicesOptions.lenientExpandOpenHidden(), "*");
        String[] mixed = indices.clone();
        mixed[mid] = "my-*";
        mixedRequest = new Request(IndicesOptions.lenientExpandOpenHidden(), mixed);
    }

    private IndexMetadata createIndexMetadata(String indexName, Metadata.Builder mb) {
        IndexMetadata indexMetadata = IndexMetadata.builder(indexName)
            .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();
        mb.put(indexMetadata, false);
        return indexMetadata;
    }

    private IndexNameExpressionResolver resolver;
    private ClusterState clusterState;
    private Request starRequest;
    private Request indexListRequest;
    private Request mixedRequest;

    @Benchmark
    public String[] resolveResourcesListToConcreteIndices() {
        return resolver.concreteIndexNames(clusterState, indexListRequest);
    }

    @Benchmark
    public String[] resolveAllStarToConcreteIndices() {
        return resolver.concreteIndexNames(clusterState, starRequest);
    }

    @Benchmark
    public String[] resolveMixedConcreteIndices() {
        return resolver.concreteIndexNames(clusterState, mixedRequest);
    }

    private int toInt(String v) {
        return Integer.parseInt(v.trim());
    }

    record Request(IndicesOptions indicesOptions, String... indices) implements IndicesRequest {

    }
}
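
JMH classes like this one are typically launched through the repository's benchmarks Gradle module; assuming the usual JMH-on-Gradle application setup (the module path and task name are an assumption, not taken from this diff), an invocation could look like:

----
# Hypothetical invocation; adjust to the benchmarks module's actual run task.
./gradlew -p benchmarks run --args 'IndexNameExpressionResolverBenchmark'
----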