Commit bd7cbe3
merging
masseyke committed Dec 7, 2023
2 parents 1fb746b + 88e00fe
Showing 143 changed files with 1,311 additions and 1,189 deletions.
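Most of the Java hunks below follow a single mechanical pattern: the ActionType constant passed to client().execute(...) moves from a standalone *Action class onto the corresponding Transport*Action class (for example, ReloadAnalyzerAction.INSTANCE becomes TransportReloadAnalyzersAction.TYPE, and ClusterHealthAction.NAME becomes TransportClusterHealthAction.NAME). A minimal before/after sketch of one such call site, using the names from the first changed test below (test scaffolding omitted; the index name is illustrative):

    // Before this commit: the action is addressed through its standalone holder class.
    ReloadAnalyzersResponse reloadResponse = client().execute(
        ReloadAnalyzerAction.INSTANCE,
        new ReloadAnalyzersRequest(null, false, "test-index")
    ).actionGet();

    // After this commit: the same ActionType constant lives on the transport action class.
    ReloadAnalyzersResponse reloadResponse = client().execute(
        TransportReloadAnalyzersAction.TYPE,
        new ReloadAnalyzersRequest(null, false, "test-index")
    ).actionGet();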
docs/changelog/103024.yaml: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
pr: 103024
summary: Fix template simulate setting application ordering
area: Indices APIs
type: bug
issues:
- 103008
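The changelog entry above tracks a bug in how settings are applied when simulating an index template (issue 103008). As a rough sketch only, with the Simulate* action, request, and response class names assumed rather than taken from this diff, the affected flow looks like this from a caller's point of view:

    // Hypothetical sketch; the Simulate* class and method names are assumptions.
    // Ask the Indices API which template would apply to a given index name.
    SimulateIndexTemplateRequest simulateRequest = new SimulateIndexTemplateRequest("my-index-000001");
    SimulateIndexTemplateResponse simulateResponse = client().execute(
        SimulateIndexTemplateAction.INSTANCE, // assumed action constant
        simulateRequest
    ).actionGet();
    // The fix concerns the order in which settings are layered into the simulated
    // result: component template settings first, then the index template's own
    // settings on top, matching documented composable-template precedence.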
@@ -11,9 +11,9 @@
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.AnalyzeToken;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.Response;
import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzerAction;
import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersRequest;
import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersResponse;
import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
@@ -121,7 +121,7 @@ private void updateSynonyms(Path synonymsFile, boolean preview) throws IOException
out.println("foo, baz, buzz");
}
ReloadAnalyzersResponse reloadResponse = client().execute(
ReloadAnalyzerAction.INSTANCE,
TransportReloadAnalyzersAction.TYPE,
new ReloadAnalyzersRequest(null, preview, INDEX_NAME)
).actionGet();
assertNoFailures(reloadResponse);
@@ -183,7 +183,7 @@ public void testSynonymsInMultiplexerUpdateable() throws FileNotFoundException,
out.println("foo, baz, buzz");
}
ReloadAnalyzersResponse reloadResponse = client().execute(
ReloadAnalyzerAction.INSTANCE,
TransportReloadAnalyzersAction.TYPE,
new ReloadAnalyzersRequest(null, false, INDEX_NAME)
).actionGet();
assertNoFailures(reloadResponse);
@@ -303,7 +303,7 @@ public void testKeywordMarkerUpdateable() throws IOException {
}

ReloadAnalyzersResponse reloadResponse = client().execute(
ReloadAnalyzerAction.INSTANCE,
TransportReloadAnalyzersAction.TYPE,
new ReloadAnalyzersRequest(null, false, INDEX_NAME)
).actionGet();
assertNoFailures(reloadResponse);
@@ -10,9 +10,9 @@

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.AnalyzeToken;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction.Response;
import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzerAction;
import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersRequest;
import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersResponse;
import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
@@ -91,7 +91,7 @@ private void testSynonymsUpdate(boolean preview) throws FileNotFoundException, I
out.println("foo, baz, " + testTerm);
}
ReloadAnalyzersResponse reloadResponse = client().execute(
ReloadAnalyzerAction.INSTANCE,
TransportReloadAnalyzersAction.TYPE,
new ReloadAnalyzersRequest(null, preview, "test")
).actionGet();
assertNoFailures(reloadResponse);
@@ -21,9 +21,9 @@
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.index.TransportIndexAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlocks;
@@ -221,7 +221,7 @@ public void testIndexChunks() throws IOException {

AtomicInteger chunkIndex = new AtomicInteger();

client.addHandler(IndexAction.INSTANCE, (IndexRequest request, ActionListener<DocWriteResponse> listener) -> {
client.addHandler(TransportIndexAction.TYPE, (IndexRequest request, ActionListener<DocWriteResponse> listener) -> {
int chunk = chunkIndex.getAndIncrement();
assertEquals(OpType.CREATE, request.opType());
assertThat(request.id(), Matchers.startsWith("test_" + (chunk + 15) + "_"));
@@ -9,7 +9,7 @@
package org.elasticsearch.http;

import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Cancellable;
import org.elasticsearch.client.Request;
@@ -73,14 +73,14 @@ public void onFailure(Exception e) {

safeAwait(barrier);

awaitTaskWithPrefixOnMaster(ClusterHealthAction.NAME);
awaitTaskWithPrefixOnMaster(TransportClusterHealthAction.NAME);

logger.info("--> cancelling cluster health request");
cancellable.cancel();
expectThrows(CancellationException.class, future::actionGet);

logger.info("--> checking cluster health task cancelled");
assertAllCancellableTasksAreCancelled(ClusterHealthAction.NAME);
assertAllCancellableTasksAreCancelled(TransportClusterHealthAction.NAME);

safeAwait(barrier);
}
@@ -10,7 +10,7 @@

import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
@@ -50,7 +50,7 @@ public void testCatRecoveryRestCancellation() {
}

public void testClusterHealthRestCancellation() {
runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_cluster/health"), ClusterHealthAction.NAME);
runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_cluster/health"), TransportClusterHealthAction.NAME);
}

public void testClusterStateRestCancellation() {
@@ -12,8 +12,8 @@
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexAction;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
@@ -47,12 +47,12 @@
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.TransportExplainAction;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetAction;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.TransportGetAction;
import org.elasticsearch.action.get.TransportMultiGetAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchTransportService;
@@ -62,7 +62,7 @@
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsAction;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.TransportUpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.internal.Requests;
@@ -181,7 +181,7 @@ public void testGetFieldMappings() {
}

public void testFieldCapabilities() {
String fieldCapabilitiesShardAction = FieldCapabilitiesAction.NAME + "[n]";
String fieldCapabilitiesShardAction = TransportFieldCapabilitiesAction.NAME + "[n]";
interceptTransportActions(fieldCapabilitiesShardAction);

FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest();
@@ -229,7 +229,10 @@ public void testDelete() {

public void testUpdate() {
// update action goes to the primary, index op gets executed locally, then replicated
String[] updateShardActions = new String[] { UpdateAction.NAME + "[s]", BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" };
String[] updateShardActions = new String[] {
TransportUpdateAction.NAME + "[s]",
BulkAction.NAME + "[s][p]",
BulkAction.NAME + "[s][r]" };
interceptTransportActions(updateShardActions);

String indexOrAlias = randomIndexOrAlias();
@@ -244,7 +247,10 @@ public void testUpdate() {

public void testUpdateUpsert() {
// update action goes to the primary, index op gets executed locally, then replicated
String[] updateShardActions = new String[] { UpdateAction.NAME + "[s]", BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" };
String[] updateShardActions = new String[] {
TransportUpdateAction.NAME + "[s]",
BulkAction.NAME + "[s][p]",
BulkAction.NAME + "[s][r]" };
interceptTransportActions(updateShardActions);

String indexOrAlias = randomIndexOrAlias();
@@ -259,7 +265,10 @@ public void testUpdateUpsert() {

public void testUpdateDelete() {
// update action goes to the primary, delete op gets executed locally, then replicated
String[] updateShardActions = new String[] { UpdateAction.NAME + "[s]", BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" };
String[] updateShardActions = new String[] {
TransportUpdateAction.NAME + "[s]",
BulkAction.NAME + "[s][p]",
BulkAction.NAME + "[s][r]" };
interceptTransportActions(updateShardActions);

String indexOrAlias = randomIndexOrAlias();
@@ -307,7 +316,7 @@ public void testBulk() {
}

public void testGet() {
String getShardAction = GetAction.NAME + "[s]";
String getShardAction = TransportGetAction.TYPE.name() + "[s]";
interceptTransportActions(getShardAction);

GetRequest getRequest = new GetRequest(randomIndexOrAlias(), "id");
@@ -358,7 +367,7 @@ public void testMultiTermVector() {
}

public void testMultiGet() {
String multiGetShardAction = MultiGetAction.NAME + "[shard][s]";
String multiGetShardAction = TransportMultiGetAction.NAME + "[shard][s]";
interceptTransportActions(multiGetShardAction);

List<String> indicesOrAliases = new ArrayList<>();
@@ -484,13 +493,13 @@ public void testOpenIndex() {
}

public void testCloseIndex() {
interceptTransportActions(CloseIndexAction.NAME);
interceptTransportActions(TransportCloseIndexAction.NAME);

CloseIndexRequest closeIndexRequest = new CloseIndexRequest(randomUniqueIndicesOrAliases());
internalCluster().coordOnlyNodeClient().admin().indices().close(closeIndexRequest).actionGet();

clearInterceptedActions();
assertSameIndices(closeIndexRequest, CloseIndexAction.NAME);
assertSameIndices(closeIndexRequest, TransportCloseIndexAction.NAME);
}

public void testDeleteIndex() {
@@ -15,7 +15,7 @@
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse;
@@ -26,7 +26,7 @@
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.TransportIndexAction;
import org.elasticsearch.action.search.SearchTransportService;
import org.elasticsearch.action.search.TransportSearchAction;
import org.elasticsearch.action.support.WriteRequest;
@@ -137,24 +137,24 @@ public void testTaskCounts() {
}

public void testMasterNodeOperationTasks() throws Exception {
registerTaskManagerListeners(ClusterHealthAction.NAME);
registerTaskManagerListeners(TransportClusterHealthAction.NAME);

// First run the health on the master node - should produce only one task on the master node
internalCluster().masterClient().admin().cluster().prepareHealth().get();
assertEquals(1, numberOfEvents(ClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
assertEquals(1, numberOfEvents(TransportClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
// counting only unregistration events
// When checking unregistration events there might be some delay since receiving the response from the cluster doesn't
// guarantee that the task has been unregistered.
assertBusy(() -> assertEquals(1, numberOfEvents(ClusterHealthAction.NAME, event -> event.v1() == false)));
assertBusy(() -> assertEquals(1, numberOfEvents(TransportClusterHealthAction.NAME, event -> event.v1() == false)));

resetTaskManagerListeners(ClusterHealthAction.NAME);
resetTaskManagerListeners(TransportClusterHealthAction.NAME);

// Now run the health on a non-master node - should produce one task on master and one task on another node
internalCluster().nonMasterClient().admin().cluster().prepareHealth().get();
assertEquals(2, numberOfEvents(ClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
assertEquals(2, numberOfEvents(TransportClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
// counting only unregistration events
assertBusy(() -> assertEquals(2, numberOfEvents(ClusterHealthAction.NAME, event -> event.v1() == false)));
List<TaskInfo> tasks = findEvents(ClusterHealthAction.NAME, Tuple::v1);
assertBusy(() -> assertEquals(2, numberOfEvents(TransportClusterHealthAction.NAME, event -> event.v1() == false)));
List<TaskInfo> tasks = findEvents(TransportClusterHealthAction.NAME, Tuple::v1);

// Verify that one of these tasks is a parent of another task
if (tasks.get(0).parentTaskId().isSet()) {
Expand Down Expand Up @@ -436,7 +436,7 @@ public void testCanFetchIndexStatus() throws Exception {
((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() {
@Override
public void onTaskRegistered(Task task) {
if (task.getAction().startsWith(IndexAction.NAME)) {
if (task.getAction().startsWith(TransportIndexAction.NAME)) {
taskRegistered.countDown();
logger.debug("Blocking [{}] starting", task);
try {
@@ -14,7 +14,6 @@
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
@@ -478,7 +477,7 @@ public void testTargetNodeFails() throws Exception {
if (randomBoolean()) {
request.indexFilter(QueryBuilders.rangeQuery("timestamp").gte("2020-01-01"));
}
final FieldCapabilitiesResponse response = client().execute(FieldCapabilitiesAction.INSTANCE, request).actionGet();
final FieldCapabilitiesResponse response = client().execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet();
assertTrue(failedRequest.get());
assertThat(response.getIndices(), arrayContainingInAnyOrder("log-index-1", "log-index-2"));
assertThat(response.getField("field1"), aMapWithSize(2));
@@ -512,7 +511,7 @@ public void testNoActiveCopy() throws Exception {
if (randomBoolean()) {
request.indexFilter(QueryBuilders.rangeQuery("timestamp").gte("2020-01-01"));
}
final FieldCapabilitiesResponse response = client().execute(FieldCapabilitiesAction.INSTANCE, request).actionGet();
final FieldCapabilitiesResponse response = client().execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet();
assertThat(response.getIndices(), arrayContainingInAnyOrder("log-index-1", "log-index-2"));
assertThat(response.getField("field1"), aMapWithSize(2));
assertThat(response.getField("field1"), hasKey("long"));
@@ -574,7 +573,7 @@ public void testRelocation() throws Exception {
if (randomBoolean()) {
request.indexFilter(QueryBuilders.rangeQuery("timestamp").gte("2020-01-01"));
}
final FieldCapabilitiesResponse response = client().execute(FieldCapabilitiesAction.INSTANCE, request).actionGet();
final FieldCapabilitiesResponse response = client().execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet();
assertThat(response.getIndices(), arrayContainingInAnyOrder("log-index-1", "log-index-2"));
assertThat(response.getField("field1"), aMapWithSize(2));
assertThat(response.getField("field1"), hasKey("long"));
@@ -630,7 +629,7 @@ public void testManyIndicesWithSameMapping() {
}
};
// Single mapping
verifyResponse.accept(client().execute(FieldCapabilitiesAction.INSTANCE, request).actionGet());
verifyResponse.accept(client().execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet());

// add an extra field for some indices
String[] indicesWithExtraField = randomSubsetOf(between(1, indices.length), indices).stream().sorted().toArray(String[]::new);
@@ -639,7 +638,7 @@
for (String index : indicesWithExtraField) {
prepareIndex(index).setSource("extra_field", randomIntBetween(1, 1000)).get();
}
FieldCapabilitiesResponse resp = client().execute(FieldCapabilitiesAction.INSTANCE, request).actionGet();
FieldCapabilitiesResponse resp = client().execute(TransportFieldCapabilitiesAction.TYPE, request).actionGet();
verifyResponse.accept(resp);
assertThat(resp.getField("extra_field"), hasKey("integer"));
assertThat(resp.getField("extra_field").get("integer").indices(), nullValue());
(Diff truncated; the remaining changed files are not shown.)